From solipsis at pitrou.net Mon Aug 1 05:27:50 2011 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Mon, 01 Aug 2011 05:27:50 +0200 Subject: [Python-checkins] Daily reference leaks (fe0d3fab0812): sum=0 Message-ID: results for fe0d3fab0812 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflog2wKhef', '-x'] From python-checkins at python.org Mon Aug 1 12:33:08 2011 From: python-checkins at python.org (vinay.sajip) Date: Mon, 01 Aug 2011 12:33:08 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Closes_=2312667?= =?utf8?q?=3A_Corrected_documentation_for_SMTPHandler_secure_argument=2E?= Message-ID: http://hg.python.org/cpython/rev/42f40f53fd73 changeset: 71660:42f40f53fd73 branch: 2.7 parent: 71657:84021f563007 user: Vinay Sajip date: Mon Aug 01 11:28:02 2011 +0100 summary: Closes #12667: Corrected documentation for SMTPHandler secure argument. files: Doc/library/logging.handlers.rst | 10 ++++++++-- 1 files changed, 8 insertions(+), 2 deletions(-) diff --git a/Doc/library/logging.handlers.rst b/Doc/library/logging.handlers.rst --- a/Doc/library/logging.handlers.rst +++ b/Doc/library/logging.handlers.rst @@ -613,8 +613,14 @@ port, use the (host, port) tuple format for the *mailhost* argument. If you use a string, the standard SMTP port is used. If your SMTP server requires authentication, you can specify a (username, password) tuple for the - *credentials* argument. If *secure* is True, then the handler will attempt - to use TLS for the email transmission. + *credentials* argument. + + To specify the use of a secure protocol (TLS), pass in a tuple to the + *secure* argument. This will only be used when authentication credentials are + supplied. The tuple should be either an empty tuple, or a single-value tuple + with the name of a keyfile, or a 2-value tuple with the names of the keyfile + and certificate file. 
(This tuple is passed to the + :meth:`smtplib.SMTP.starttls` method.) .. versionchanged:: 2.6 *credentials* was added. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 12:33:09 2011 From: python-checkins at python.org (vinay.sajip) Date: Mon, 01 Aug 2011 12:33:09 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Closes_=2312667?= =?utf8?q?=3A_Added_documentation_for_SMTPHandler_secure_argument=2E?= Message-ID: http://hg.python.org/cpython/rev/ba5bd8c1ae27 changeset: 71661:ba5bd8c1ae27 branch: 3.2 parent: 71658:14b25e4a5591 user: Vinay Sajip date: Mon Aug 01 11:31:52 2011 +0100 summary: Closes #12667: Added documentation for SMTPHandler secure argument. files: Doc/library/logging.handlers.rst | 8 +++++++- 1 files changed, 7 insertions(+), 1 deletions(-) diff --git a/Doc/library/logging.handlers.rst b/Doc/library/logging.handlers.rst --- a/Doc/library/logging.handlers.rst +++ b/Doc/library/logging.handlers.rst @@ -614,7 +614,7 @@ supports sending logging messages to an email address via SMTP. -.. class:: SMTPHandler(mailhost, fromaddr, toaddrs, subject, credentials=None) +.. class:: SMTPHandler(mailhost, fromaddr, toaddrs, subject, credentials=None, secure=None) Returns a new instance of the :class:`SMTPHandler` class. The instance is initialized with the from and to addresses and subject line of the email. The @@ -623,6 +623,12 @@ the standard SMTP port is used. If your SMTP server requires authentication, you can specify a (username, password) tuple for the *credentials* argument. + To specify the use of a secure protocol (TLS), pass in a tuple to the + *secure* argument. This will only be used when authentication credentials are + supplied. The tuple should be either an empty tuple, or a single-value tuple + with the name of a keyfile, or a 2-value tuple with the names of the keyfile + and certificate file. (This tuple is passed to the + :meth:`smtplib.SMTP.starttls` method.) .. 
method:: emit(record) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 12:33:09 2011 From: python-checkins at python.org (vinay.sajip) Date: Mon, 01 Aug 2011 12:33:09 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Closes_=2312667=3A_Merged_fix_from_3=2E2=2E?= Message-ID: http://hg.python.org/cpython/rev/2d69900c0820 changeset: 71662:2d69900c0820 parent: 71659:fe0d3fab0812 parent: 71661:ba5bd8c1ae27 user: Vinay Sajip date: Mon Aug 01 11:32:49 2011 +0100 summary: Closes #12667: Merged fix from 3.2. files: Doc/library/logging.handlers.rst | 8 +++++++- 1 files changed, 7 insertions(+), 1 deletions(-) diff --git a/Doc/library/logging.handlers.rst b/Doc/library/logging.handlers.rst --- a/Doc/library/logging.handlers.rst +++ b/Doc/library/logging.handlers.rst @@ -623,7 +623,7 @@ supports sending logging messages to an email address via SMTP. -.. class:: SMTPHandler(mailhost, fromaddr, toaddrs, subject, credentials=None) +.. class:: SMTPHandler(mailhost, fromaddr, toaddrs, subject, credentials=None, secure=None) Returns a new instance of the :class:`SMTPHandler` class. The instance is initialized with the from and to addresses and subject line of the email. The @@ -632,6 +632,12 @@ the standard SMTP port is used. If your SMTP server requires authentication, you can specify a (username, password) tuple for the *credentials* argument. + To specify the use of a secure protocol (TLS), pass in a tuple to the + *secure* argument. This will only be used when authentication credentials are + supplied. The tuple should be either an empty tuple, or a single-value tuple + with the name of a keyfile, or a 2-value tuple with the names of the keyfile + and certificate file. (This tuple is passed to the + :meth:`smtplib.SMTP.starttls` method.) .. 
method:: emit(record) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 14:45:23 2011 From: python-checkins at python.org (eric.araujo) Date: Mon, 01 Aug 2011 14:45:23 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogTGV0IOKAnG1ha2Ug?= =?utf8?q?patchcheck=E2=80=9D_work_for_out-of-dir_builds_=28=239860=29?= Message-ID: http://hg.python.org/cpython/rev/313a71664781 changeset: 71663:313a71664781 branch: 3.2 parent: 71631:febf911c2e95 user: ?ric Araujo date: Sat Jul 30 21:34:04 2011 +0200 summary: Let ?make patchcheck? work for out-of-dir builds (#9860) files: Tools/scripts/patchcheck.py | 20 +++++++++++++------- 1 files changed, 13 insertions(+), 7 deletions(-) diff --git a/Tools/scripts/patchcheck.py b/Tools/scripts/patchcheck.py --- a/Tools/scripts/patchcheck.py +++ b/Tools/scripts/patchcheck.py @@ -4,11 +4,15 @@ import shutil import os.path import subprocess +import sysconfig import reindent import untabify +SRCDIR = sysconfig.get_config_var('srcdir') + + def n_files_str(count): """Return 'N file(s)' with the proper plurality on 'file'.""" return "{} file{}".format(count, "s" if count != 1 else "") @@ -36,7 +40,7 @@ info=lambda x: n_files_str(len(x))) def changed_files(): """Get the list of changed or added files from the VCS.""" - if os.path.isdir('.hg'): + if os.path.isdir(os.path.join(SRCDIR, '.hg')): vcs = 'hg' cmd = 'hg status --added --modified --no-status' elif os.path.isdir('.svn'): @@ -75,7 +79,7 @@ reindent.makebackup = False # No need to create backups. 
fixed = [] for path in (x for x in file_paths if x.endswith('.py')): - if reindent.check(path): + if reindent.check(os.path.join(SRCDIR, path)): fixed.append(path) return fixed @@ -85,10 +89,11 @@ """Report if any C files """ fixed = [] for path in file_paths: - with open(path, 'r') as f: + abspath = os.path.join(SRCDIR, path) + with open(abspath, 'r') as f: if '\t' not in f.read(): continue - untabify.process(path, 8, verbose=False) + untabify.process(abspath, 8, verbose=False) fixed.append(path) return fixed @@ -99,13 +104,14 @@ def normalize_docs_whitespace(file_paths): fixed = [] for path in file_paths: + abspath = os.path.join(SRCDIR, path) try: - with open(path, 'rb') as f: + with open(abspath, 'rb') as f: lines = f.readlines() new_lines = [ws_re.sub(br'\1', line) for line in lines] if new_lines != lines: - shutil.copyfile(path, path + '.bak') - with open(path, 'wb') as f: + shutil.copyfile(abspath, abspath + '.bak') + with open(abspath, 'wb') as f: f.writelines(new_lines) fixed.append(path) except Exception as err: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 14:45:24 2011 From: python-checkins at python.org (eric.araujo) Date: Mon, 01 Aug 2011 14:45:24 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Fix_regression_?= =?utf8?q?with_distutils_MANIFEST_handing_=28=2311104=2C_=238688=29=2E?= Message-ID: http://hg.python.org/cpython/rev/5d3e22d69d4f changeset: 71664:5d3e22d69d4f branch: 3.2 user: ?ric Araujo date: Sun Jul 31 04:06:12 2011 +0200 summary: Fix regression with distutils MANIFEST handing (#11104, #8688). The changed behavior of sdist in 3.1 broke packaging for projects that wanted to use a manually-maintained MANIFEST file (instead of having a MANIFEST.in template and letting distutils generate the MANIFEST). 
The fixes that were committed for #8688 (76643c286b9f by Tarek and d54da9248ed9 by me) did not fix all issues exposed in the bug report, and also added one problem: the MANIFEST file format gained comments, but the read_manifest method was not updated to handle (i.e. ignore) them. This changeset should fix everything; the tests have been expanded and I successfully tested the 2.7 version with Mercurial, which suffered from this regression. I have grouped the versionchanged directives for these bugs in one place and added micro version numbers to help users know the quirks of the exact version they?re using. Initial report, thorough diagnosis and patch by John Dennis, further work on the patch by Stephen Thorne, and a few edits and additions by me. files: Doc/distutils/sourcedist.rst | 25 ++++++--- Lib/distutils/command/sdist.py | 48 +++++++++++------- Lib/distutils/tests/test_sdist.py | 39 +++++++++++++-- Misc/ACKS | 2 + Misc/NEWS | 3 + 5 files changed, 84 insertions(+), 33 deletions(-) diff --git a/Doc/distutils/sourcedist.rst b/Doc/distutils/sourcedist.rst --- a/Doc/distutils/sourcedist.rst +++ b/Doc/distutils/sourcedist.rst @@ -103,10 +103,20 @@ :file:`MANIFEST`, you must specify everything: the default set of files described above does not apply in this case. -.. versionadded:: 3.1 +.. versionchanged:: 3.1 + An existing generated :file:`MANIFEST` will be regenerated without + :command:`sdist` comparing its modification time to the one of + :file:`MANIFEST.in` or :file:`setup.py`. + +.. versionchanged:: 3.1.3 :file:`MANIFEST` files start with a comment indicating they are generated. Files without this comment are not overwritten or removed. +.. versionchanged:: 3.2.2 + :command:`sdist` will read a :file:`MANIFEST` file if no :file:`MANIFEST.in` + exists, like it used to do. + + The manifest template has one command per line, where each command specifies a set of files to include or exclude from the source distribution. 
For an example, again we turn to the Distutils' own manifest template:: @@ -185,8 +195,12 @@ The normal course of operations for the :command:`sdist` command is as follows: -* if the manifest file, :file:`MANIFEST` doesn't exist, read :file:`MANIFEST.in` - and create the manifest +* if the manifest file (:file:`MANIFEST` by default) exists and the first line + does not have a comment indicating it is generated from :file:`MANIFEST.in`, + then it is used as is, unaltered + +* if the manifest file doesn't exist or has been previously automatically + generated, read :file:`MANIFEST.in` and create the manifest * if neither :file:`MANIFEST` nor :file:`MANIFEST.in` exist, create a manifest with just the default file set @@ -204,8 +218,3 @@ python setup.py sdist --manifest-only :option:`-o` is a shortcut for :option:`--manifest-only`. - -.. versionchanged:: 3.1 - An existing generated :file:`MANIFEST` will be regenerated without - :command:`sdist` comparing its modification time to the one of - :file:`MANIFEST.in` or :file:`setup.py`. diff --git a/Lib/distutils/command/sdist.py b/Lib/distutils/command/sdist.py --- a/Lib/distutils/command/sdist.py +++ b/Lib/distutils/command/sdist.py @@ -174,14 +174,20 @@ reading the manifest, or just using the default file set -- it all depends on the user's options. """ - # new behavior: + # new behavior when using a template: # the file list is recalculated everytime because # even if MANIFEST.in or setup.py are not changed # the user might have added some files in the tree that # need to be included. # - # This makes --force the default and only behavior. + # This makes --force the default and only behavior with templates. 
template_exists = os.path.isfile(self.template) + if not template_exists and self._manifest_is_not_generated(): + self.read_manifest() + self.filelist.sort() + self.filelist.remove_duplicates() + return + if not template_exists: self.warn(("manifest template '%s' does not exist " + "(using default file list)") % @@ -336,23 +342,28 @@ by 'add_defaults()' and 'read_template()') to the manifest file named by 'self.manifest'. """ - if os.path.isfile(self.manifest): - fp = open(self.manifest) - try: - first_line = fp.readline() - finally: - fp.close() - - if first_line != '# file GENERATED by distutils, do NOT edit\n': - log.info("not writing to manually maintained " - "manifest file '%s'" % self.manifest) - return + if self._manifest_is_not_generated(): + log.info("not writing to manually maintained " + "manifest file '%s'" % self.manifest) + return content = self.filelist.files[:] content.insert(0, '# file GENERATED by distutils, do NOT edit') self.execute(file_util.write_file, (self.manifest, content), "writing manifest file '%s'" % self.manifest) + def _manifest_is_not_generated(self): + # check for special comment used in 3.1.3 and higher + if not os.path.isfile(self.manifest): + return False + + fp = open(self.manifest) + try: + first_line = fp.readline() + finally: + fp.close() + return first_line != '# file GENERATED by distutils, do NOT edit\n' + def read_manifest(self): """Read the manifest file (named by 'self.manifest') and use it to fill in 'self.filelist', the list of files to include in the source @@ -360,12 +371,11 @@ """ log.info("reading manifest file '%s'", self.manifest) manifest = open(self.manifest) - while True: - line = manifest.readline() - if line == '': # end of file - break - if line[-1] == '\n': - line = line[0:-1] + for line in manifest: + # ignore comments and blank lines + line = line.strip() + if line.startswith('#') or not line: + continue self.filelist.append(line) manifest.close() diff --git a/Lib/distutils/tests/test_sdist.py 
b/Lib/distutils/tests/test_sdist.py --- a/Lib/distutils/tests/test_sdist.py +++ b/Lib/distutils/tests/test_sdist.py @@ -1,21 +1,19 @@ """Tests for distutils.command.sdist.""" import os +import tarfile import unittest -import shutil +import warnings import zipfile from os.path import join -import sys -import tempfile -import warnings +from textwrap import dedent from test.support import captured_stdout, check_warnings, run_unittest from distutils.command.sdist import sdist, show_formats from distutils.core import Distribution from distutils.tests.test_config import PyPIRCCommandTestCase -from distutils.errors import DistutilsExecError, DistutilsOptionError +from distutils.errors import DistutilsOptionError from distutils.spawn import find_executable -from distutils.tests import support from distutils.log import WARN from distutils.archive_util import ARCHIVE_FORMATS @@ -346,13 +344,33 @@ self.assertEqual(manifest[0], '# file GENERATED by distutils, do NOT edit') + @unittest.skipUnless(ZLIB_SUPPORT, "Need zlib support to run") + def test_manifest_comments(self): + # make sure comments don't cause exceptions or wrong includes + contents = dedent("""\ + # bad.py + #bad.py + good.py + """) + dist, cmd = self.get_cmd() + cmd.ensure_finalized() + self.write_file((self.tmp_dir, cmd.manifest), contents) + self.write_file((self.tmp_dir, 'good.py'), '# pick me!') + self.write_file((self.tmp_dir, 'bad.py'), "# don't pick me!") + self.write_file((self.tmp_dir, '#bad.py'), "# don't pick me!") + cmd.run() + self.assertEqual(cmd.filelist.files, ['good.py']) + @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') def test_manual_manifest(self): # check that a MANIFEST without a marker is left alone dist, cmd = self.get_cmd() cmd.ensure_finalized() self.write_file((self.tmp_dir, cmd.manifest), 'README.manual') + self.write_file((self.tmp_dir, 'README.manual'), + 'This project maintains its MANIFEST file itself.') cmd.run() + self.assertEqual(cmd.filelist.files, 
['README.manual']) f = open(cmd.manifest) try: @@ -363,6 +381,15 @@ self.assertEqual(manifest, ['README.manual']) + archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') + archive = tarfile.open(archive_name) + try: + filenames = [tarinfo.name for tarinfo in archive] + finally: + archive.close() + self.assertEqual(sorted(filenames), ['fake-1.0', 'fake-1.0/PKG-INFO', + 'fake-1.0/README.manual']) + def test_suite(): return unittest.makeSuite(SDistTestCase) diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -215,6 +215,7 @@ Vincent Delft Arnaud Delobelle Erik Demaine +John Dennis Roger Dev Raghuram Devarakonda Caleb Deveraux @@ -875,6 +876,7 @@ Tobias Thelen James Thomas Robin Thomas +Stephen Thorne Jeremy Thurgood Eric Tiedemann July Tikhonov diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -41,6 +41,9 @@ Library ------- +- Issues #11104, #8688: Fix the behavior of distutils' sdist command with + manually-maintained MANIFEST files. + - Issue #12464: tempfile.TemporaryDirectory.cleanup() should not follow symlinks: fix it. Patch by Petri Lehtinen. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 14:45:25 2011 From: python-checkins at python.org (eric.araujo) Date: Mon, 01 Aug 2011 14:45:25 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Stop_trying_to_?= =?utf8?q?write_into_the_stdlib_during_lib2to3_tests_=28=2312331=29=2E?= Message-ID: http://hg.python.org/cpython/rev/2b5a0c4e052b changeset: 71665:2b5a0c4e052b branch: 3.2 user: ?ric Araujo date: Sun Jul 31 17:58:46 2011 +0200 summary: Stop trying to write into the stdlib during lib2to3 tests (#12331). This prevents tests from failing when run from a Python installed in a read-only directory. 
files: Lib/lib2to3/tests/test_refactor.py | 18 +++++++++++------- Misc/NEWS | 3 +++ 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/Lib/lib2to3/tests/test_refactor.py b/Lib/lib2to3/tests/test_refactor.py --- a/Lib/lib2to3/tests/test_refactor.py +++ b/Lib/lib2to3/tests/test_refactor.py @@ -177,22 +177,26 @@ self.assertEqual(results, expected) def check_file_refactoring(self, test_file, fixers=_2TO3_FIXERS): + tmpdir = tempfile.mkdtemp(prefix="2to3-test_refactor") + self.addCleanup(shutil.rmtree, tmpdir) + # make a copy of the tested file that we can write to + shutil.copy(test_file, tmpdir) + test_file = os.path.join(tmpdir, os.path.basename(test_file)) + os.chmod(test_file, 0o644) + def read_file(): with open(test_file, "rb") as fp: return fp.read() + old_contents = read_file() rt = self.rt(fixers=fixers) rt.refactor_file(test_file) self.assertEqual(old_contents, read_file()) - try: - rt.refactor_file(test_file, True) - new_contents = read_file() - self.assertNotEqual(old_contents, new_contents) - finally: - with open(test_file, "wb") as fp: - fp.write(old_contents) + rt.refactor_file(test_file, True) + new_contents = read_file() + self.assertNotEqual(old_contents, new_contents) return new_contents def test_refactor_file(self): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -140,6 +140,9 @@ Tests ----- +- Issue #12331: The test suite for lib2to3 can now run from an installed + Python. + - Issue #12626: In regrtest, allow to filter tests using a glob filter with the ``-m`` (or ``--match``) option. This works with all test cases using the unittest module. 
This is useful with long test suites -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 14:45:25 2011 From: python-checkins at python.org (eric.araujo) Date: Mon, 01 Aug 2011 14:45:25 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_fixes_for_=239860=2C_=2311104/=238688_and_=2312331_fro?= =?utf8?q?m_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/5993f91598ce changeset: 71666:5993f91598ce parent: 71632:8d1a046441ea parent: 71665:2b5a0c4e052b user: ?ric Araujo date: Sun Jul 31 18:33:00 2011 +0200 summary: Merge fixes for #9860, #11104/#8688 and #12331 from 3.2 files: Doc/distutils/sourcedist.rst | 25 ++++++--- Lib/distutils/command/sdist.py | 48 ++++++++++------- Lib/distutils/tests/test_sdist.py | 39 ++++++++++++-- Lib/lib2to3/tests/test_refactor.py | 18 ++++-- Misc/ACKS | 2 + Misc/NEWS | 6 ++ Tools/scripts/patchcheck.py | 20 ++++-- 7 files changed, 111 insertions(+), 47 deletions(-) diff --git a/Doc/distutils/sourcedist.rst b/Doc/distutils/sourcedist.rst --- a/Doc/distutils/sourcedist.rst +++ b/Doc/distutils/sourcedist.rst @@ -103,10 +103,20 @@ :file:`MANIFEST`, you must specify everything: the default set of files described above does not apply in this case. -.. versionadded:: 3.1 +.. versionchanged:: 3.1 + An existing generated :file:`MANIFEST` will be regenerated without + :command:`sdist` comparing its modification time to the one of + :file:`MANIFEST.in` or :file:`setup.py`. + +.. versionchanged:: 3.1.3 :file:`MANIFEST` files start with a comment indicating they are generated. Files without this comment are not overwritten or removed. +.. versionchanged:: 3.2.2 + :command:`sdist` will read a :file:`MANIFEST` file if no :file:`MANIFEST.in` + exists, like it used to do. + + The manifest template has one command per line, where each command specifies a set of files to include or exclude from the source distribution. 
For an example, again we turn to the Distutils' own manifest template:: @@ -185,8 +195,12 @@ The normal course of operations for the :command:`sdist` command is as follows: -* if the manifest file, :file:`MANIFEST` doesn't exist, read :file:`MANIFEST.in` - and create the manifest +* if the manifest file (:file:`MANIFEST` by default) exists and the first line + does not have a comment indicating it is generated from :file:`MANIFEST.in`, + then it is used as is, unaltered + +* if the manifest file doesn't exist or has been previously automatically + generated, read :file:`MANIFEST.in` and create the manifest * if neither :file:`MANIFEST` nor :file:`MANIFEST.in` exist, create a manifest with just the default file set @@ -204,8 +218,3 @@ python setup.py sdist --manifest-only :option:`-o` is a shortcut for :option:`--manifest-only`. - -.. versionchanged:: 3.1 - An existing generated :file:`MANIFEST` will be regenerated without - :command:`sdist` comparing its modification time to the one of - :file:`MANIFEST.in` or :file:`setup.py`. diff --git a/Lib/distutils/command/sdist.py b/Lib/distutils/command/sdist.py --- a/Lib/distutils/command/sdist.py +++ b/Lib/distutils/command/sdist.py @@ -174,14 +174,20 @@ reading the manifest, or just using the default file set -- it all depends on the user's options. """ - # new behavior: + # new behavior when using a template: # the file list is recalculated everytime because # even if MANIFEST.in or setup.py are not changed # the user might have added some files in the tree that # need to be included. # - # This makes --force the default and only behavior. + # This makes --force the default and only behavior with templates. 
template_exists = os.path.isfile(self.template) + if not template_exists and self._manifest_is_not_generated(): + self.read_manifest() + self.filelist.sort() + self.filelist.remove_duplicates() + return + if not template_exists: self.warn(("manifest template '%s' does not exist " + "(using default file list)") % @@ -336,23 +342,28 @@ by 'add_defaults()' and 'read_template()') to the manifest file named by 'self.manifest'. """ - if os.path.isfile(self.manifest): - fp = open(self.manifest) - try: - first_line = fp.readline() - finally: - fp.close() - - if first_line != '# file GENERATED by distutils, do NOT edit\n': - log.info("not writing to manually maintained " - "manifest file '%s'" % self.manifest) - return + if self._manifest_is_not_generated(): + log.info("not writing to manually maintained " + "manifest file '%s'" % self.manifest) + return content = self.filelist.files[:] content.insert(0, '# file GENERATED by distutils, do NOT edit') self.execute(file_util.write_file, (self.manifest, content), "writing manifest file '%s'" % self.manifest) + def _manifest_is_not_generated(self): + # check for special comment used in 3.1.3 and higher + if not os.path.isfile(self.manifest): + return False + + fp = open(self.manifest) + try: + first_line = fp.readline() + finally: + fp.close() + return first_line != '# file GENERATED by distutils, do NOT edit\n' + def read_manifest(self): """Read the manifest file (named by 'self.manifest') and use it to fill in 'self.filelist', the list of files to include in the source @@ -360,12 +371,11 @@ """ log.info("reading manifest file '%s'", self.manifest) manifest = open(self.manifest) - while True: - line = manifest.readline() - if line == '': # end of file - break - if line[-1] == '\n': - line = line[0:-1] + for line in manifest: + # ignore comments and blank lines + line = line.strip() + if line.startswith('#') or not line: + continue self.filelist.append(line) manifest.close() diff --git a/Lib/distutils/tests/test_sdist.py 
b/Lib/distutils/tests/test_sdist.py --- a/Lib/distutils/tests/test_sdist.py +++ b/Lib/distutils/tests/test_sdist.py @@ -1,21 +1,19 @@ """Tests for distutils.command.sdist.""" import os +import tarfile import unittest -import shutil +import warnings import zipfile from os.path import join -import sys -import tempfile -import warnings +from textwrap import dedent from test.support import captured_stdout, check_warnings, run_unittest from distutils.command.sdist import sdist, show_formats from distutils.core import Distribution from distutils.tests.test_config import PyPIRCCommandTestCase -from distutils.errors import DistutilsExecError, DistutilsOptionError +from distutils.errors import DistutilsOptionError from distutils.spawn import find_executable -from distutils.tests import support from distutils.log import WARN from distutils.archive_util import ARCHIVE_FORMATS @@ -346,13 +344,33 @@ self.assertEqual(manifest[0], '# file GENERATED by distutils, do NOT edit') + @unittest.skipUnless(ZLIB_SUPPORT, "Need zlib support to run") + def test_manifest_comments(self): + # make sure comments don't cause exceptions or wrong includes + contents = dedent("""\ + # bad.py + #bad.py + good.py + """) + dist, cmd = self.get_cmd() + cmd.ensure_finalized() + self.write_file((self.tmp_dir, cmd.manifest), contents) + self.write_file((self.tmp_dir, 'good.py'), '# pick me!') + self.write_file((self.tmp_dir, 'bad.py'), "# don't pick me!") + self.write_file((self.tmp_dir, '#bad.py'), "# don't pick me!") + cmd.run() + self.assertEqual(cmd.filelist.files, ['good.py']) + @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') def test_manual_manifest(self): # check that a MANIFEST without a marker is left alone dist, cmd = self.get_cmd() cmd.ensure_finalized() self.write_file((self.tmp_dir, cmd.manifest), 'README.manual') + self.write_file((self.tmp_dir, 'README.manual'), + 'This project maintains its MANIFEST file itself.') cmd.run() + self.assertEqual(cmd.filelist.files, 
['README.manual']) f = open(cmd.manifest) try: @@ -363,6 +381,15 @@ self.assertEqual(manifest, ['README.manual']) + archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') + archive = tarfile.open(archive_name) + try: + filenames = [tarinfo.name for tarinfo in archive] + finally: + archive.close() + self.assertEqual(sorted(filenames), ['fake-1.0', 'fake-1.0/PKG-INFO', + 'fake-1.0/README.manual']) + def test_suite(): return unittest.makeSuite(SDistTestCase) diff --git a/Lib/lib2to3/tests/test_refactor.py b/Lib/lib2to3/tests/test_refactor.py --- a/Lib/lib2to3/tests/test_refactor.py +++ b/Lib/lib2to3/tests/test_refactor.py @@ -177,22 +177,26 @@ self.assertEqual(results, expected) def check_file_refactoring(self, test_file, fixers=_2TO3_FIXERS): + tmpdir = tempfile.mkdtemp(prefix="2to3-test_refactor") + self.addCleanup(shutil.rmtree, tmpdir) + # make a copy of the tested file that we can write to + shutil.copy(test_file, tmpdir) + test_file = os.path.join(tmpdir, os.path.basename(test_file)) + os.chmod(test_file, 0o644) + def read_file(): with open(test_file, "rb") as fp: return fp.read() + old_contents = read_file() rt = self.rt(fixers=fixers) rt.refactor_file(test_file) self.assertEqual(old_contents, read_file()) - try: - rt.refactor_file(test_file, True) - new_contents = read_file() - self.assertNotEqual(old_contents, new_contents) - finally: - with open(test_file, "wb") as fp: - fp.write(old_contents) + rt.refactor_file(test_file, True) + new_contents = read_file() + self.assertNotEqual(old_contents, new_contents) return new_contents def test_refactor_file(self): diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -228,6 +228,7 @@ Arnaud Delobelle Konrad Delong Erik Demaine +John Dennis Roger Dev Raghuram Devarakonda Caleb Deveraux @@ -931,6 +932,7 @@ Tobias Thelen James Thomas Robin Thomas +Stephen Thorne Jeremy Thurgood Eric Tiedemann July Tikhonov diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -249,6 +249,9 @@ 
Library ------- +- Issues #11104, #8688: Fix the behavior of distutils' sdist command with + manually-maintained MANIFEST files. + - Issue #11281: smtplib.STMP gets source_address parameter, which adds the ability to bind to specific source address on a machine with multiple interfaces. Patch by Paulo Scardine. @@ -1144,6 +1147,9 @@ Tests ----- +- Issue #12331: The test suite for lib2to3 can now run from an installed + Python. + - Issue #12626: In regrtest, allow to filter tests using a glob filter with the ``-m`` (or ``--match``) option. This works with all test cases using the unittest module. This is useful with long test suites diff --git a/Tools/scripts/patchcheck.py b/Tools/scripts/patchcheck.py --- a/Tools/scripts/patchcheck.py +++ b/Tools/scripts/patchcheck.py @@ -4,11 +4,15 @@ import shutil import os.path import subprocess +import sysconfig import reindent import untabify +SRCDIR = sysconfig.get_config_var('srcdir') + + def n_files_str(count): """Return 'N file(s)' with the proper plurality on 'file'.""" return "{} file{}".format(count, "s" if count != 1 else "") @@ -36,7 +40,7 @@ info=lambda x: n_files_str(len(x))) def changed_files(): """Get the list of changed or added files from the VCS.""" - if os.path.isdir('.hg'): + if os.path.isdir(os.path.join(SRCDIR, '.hg')): cmd = 'hg status --added --modified --no-status' else: sys.exit('need a checkout to get modified files') @@ -65,7 +69,7 @@ """Make sure that the whitespace for .py files have been normalized.""" reindent.makebackup = False # No need to create backups. 
fixed = [path for path in file_paths if path.endswith('.py') and - reindent.check(path)] + reindent.check(os.path.join(SRCDIR, path))] return fixed @@ -74,10 +78,11 @@ """Report if any C files """ fixed = [] for path in file_paths: - with open(path, 'r') as f: + abspath = os.path.join(SRCDIR, path) + with open(abspath, 'r') as f: if '\t' not in f.read(): continue - untabify.process(path, 8, verbose=False) + untabify.process(abspath, 8, verbose=False) fixed.append(path) return fixed @@ -88,13 +93,14 @@ def normalize_docs_whitespace(file_paths): fixed = [] for path in file_paths: + abspath = os.path.join(SRCDIR, path) try: - with open(path, 'rb') as f: + with open(abspath, 'rb') as f: lines = f.readlines() new_lines = [ws_re.sub(br'\1', line) for line in lines] if new_lines != lines: - shutil.copyfile(path, path + '.bak') - with open(path, 'wb') as f: + shutil.copyfile(abspath, abspath + '.bak') + with open(abspath, 'wb') as f: f.writelines(new_lines) fixed.append(path) except Exception as err: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 14:45:26 2011 From: python-checkins at python.org (eric.araujo) Date: Mon, 01 Aug 2011 14:45:26 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Small_cleanup?= Message-ID: http://hg.python.org/cpython/rev/d72e04ff0f97 changeset: 71667:d72e04ff0f97 user: ?ric Araujo date: Sun Jul 31 18:41:25 2011 +0200 summary: Small cleanup files: Tools/scripts/patchcheck.py | 13 ++++--------- 1 files changed, 4 insertions(+), 9 deletions(-) diff --git a/Tools/scripts/patchcheck.py b/Tools/scripts/patchcheck.py --- a/Tools/scripts/patchcheck.py +++ b/Tools/scripts/patchcheck.py @@ -39,18 +39,13 @@ @status("Getting the list of files that have been added/changed", info=lambda x: n_files_str(len(x))) def changed_files(): - """Get the list of changed or added files from the VCS.""" - if os.path.isdir(os.path.join(SRCDIR, '.hg')): - cmd = 'hg status --added --modified --no-status' - else: + """Get the 
list of changed or added files from Mercurial.""" + if not os.path.isdir(os.path.join(SRCDIR, '.hg')): sys.exit('need a checkout to get modified files') - st = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE) - try: - st.wait() + cmd = 'hg status --added --modified --no-status' + with subprocess.Popen(cmd.split(), stdout=subprocess.PIPE) as st: return [x.decode().rstrip() for x in st.stdout] - finally: - st.stdout.close() def report_modified_files(file_paths): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 14:45:27 2011 From: python-checkins at python.org (eric.araujo) Date: Mon, 01 Aug 2011 14:45:27 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Stop_trying_to_write_into_t?= =?utf8?q?he_stdlib_during_packaging_tests_=28=2312331=29=2E?= Message-ID: http://hg.python.org/cpython/rev/7ee8f413188e changeset: 71668:7ee8f413188e user: ?ric Araujo date: Sun Jul 31 20:47:47 2011 +0200 summary: Stop trying to write into the stdlib during packaging tests (#12331). This prevents tests from failing when run from a Python installed in a read-only directory. The code is a bit uglier; shutil.copytree calls copystat on directories behind our back, so I had to add an os.walk with os.chmod (*and* os.path.join!) calls. shutil, I am disappoint. This changeset is dedicated to the hundreds of neurons that were lost while I was debugging this on an otherwise fine afternoon. 
files: Lib/packaging/tests/test_database.py | 54 +++++++++------ Misc/NEWS | 3 + 2 files changed, 34 insertions(+), 23 deletions(-) diff --git a/Lib/packaging/tests/test_database.py b/Lib/packaging/tests/test_database.py --- a/Lib/packaging/tests/test_database.py +++ b/Lib/packaging/tests/test_database.py @@ -39,20 +39,40 @@ return [path, digest, size] -class CommonDistributionTests: +class FakeDistsMixin: + + def setUp(self): + super(FakeDistsMixin, self).setUp() + self.addCleanup(enable_cache) + disable_cache() + + # make a copy that we can write into for our fake installed + # distributions + tmpdir = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, tmpdir) + self.fake_dists_path = os.path.join(tmpdir, 'fake_dists') + fake_dists_src = os.path.abspath( + os.path.join(os.path.dirname(__file__), 'fake_dists')) + shutil.copytree(fake_dists_src, self.fake_dists_path) + # XXX ugly workaround: revert copystat calls done by shutil behind our + # back (to avoid getting a read-only copy of a read-only file). we + # could pass a custom copy_function to change the mode of files, but + # shutil gives no control over the mode of directories :( + for root, dirs, files in os.walk(self.fake_dists_path): + os.chmod(root, 0o755) + for f in files: + os.chmod(os.path.join(root, f), 0o644) + for d in dirs: + os.chmod(os.path.join(root, d), 0o755) + + +class CommonDistributionTests(FakeDistsMixin): """Mixin used to test the interface common to both Distribution classes. Derived classes define cls, sample_dist, dirs and records. These attributes are used in test methods. See source code for details. 
""" - def setUp(self): - super(CommonDistributionTests, self).setUp() - self.addCleanup(enable_cache) - disable_cache() - self.fake_dists_path = os.path.abspath( - os.path.join(os.path.dirname(__file__), 'fake_dists')) - def test_instantiation(self): # check that useful attributes are here name, version, distdir = self.sample_dist @@ -110,6 +130,7 @@ self.records = {} for distinfo_dir in self.dirs: + record_file = os.path.join(distinfo_dir, 'RECORD') with open(record_file, 'w') as file: record_writer = csv.writer( @@ -138,12 +159,6 @@ record_data[path] = md5_, size self.records[distinfo_dir] = record_data - def tearDown(self): - for distinfo_dir in self.dirs: - record_file = os.path.join(distinfo_dir, 'RECORD') - open(record_file, 'wb').close() - super(TestDistribution, self).tearDown() - def test_instantiation(self): super(TestDistribution, self).test_instantiation() self.assertIsInstance(self.dist.requested, bool) @@ -252,20 +267,13 @@ class TestDatabase(support.LoggingCatcher, + FakeDistsMixin, unittest.TestCase): def setUp(self): super(TestDatabase, self).setUp() - disable_cache() - # Setup the path environment with our fake distributions - current_path = os.path.abspath(os.path.dirname(__file__)) - self.fake_dists_path = os.path.join(current_path, 'fake_dists') sys.path.insert(0, self.fake_dists_path) - - def tearDown(self): - sys.path.remove(self.fake_dists_path) - enable_cache() - super(TestDatabase, self).tearDown() + self.addCleanup(sys.path.remove, self.fake_dists_path) def test_distinfo_dirname(self): # Given a name and a version, we expect the distinfo_dirname function diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -1147,6 +1147,9 @@ Tests ----- +- Issue #12331: The test suite for the packaging module can now run from an + installed Python. + - Issue #12331: The test suite for lib2to3 can now run from an installed Python. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 14:45:28 2011 From: python-checkins at python.org (eric.araujo) Date: Mon, 01 Aug 2011 14:45:28 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAobWVyZ2UgMy4yIC0+IDMuMik6?= =?utf8?q?_Branch_merge?= Message-ID: http://hg.python.org/cpython/rev/da79c5d0ed17 changeset: 71669:da79c5d0ed17 branch: 3.2 parent: 71661:ba5bd8c1ae27 parent: 71665:2b5a0c4e052b user: ?ric Araujo date: Mon Aug 01 14:43:45 2011 +0200 summary: Branch merge files: Doc/distutils/sourcedist.rst | 25 ++++++--- Lib/distutils/command/sdist.py | 48 ++++++++++------- Lib/distutils/tests/test_sdist.py | 39 ++++++++++++-- Lib/lib2to3/tests/test_refactor.py | 18 ++++-- Misc/ACKS | 2 + Misc/NEWS | 6 ++ Tools/scripts/patchcheck.py | 20 ++++-- 7 files changed, 111 insertions(+), 47 deletions(-) diff --git a/Doc/distutils/sourcedist.rst b/Doc/distutils/sourcedist.rst --- a/Doc/distutils/sourcedist.rst +++ b/Doc/distutils/sourcedist.rst @@ -103,10 +103,20 @@ :file:`MANIFEST`, you must specify everything: the default set of files described above does not apply in this case. -.. versionadded:: 3.1 +.. versionchanged:: 3.1 + An existing generated :file:`MANIFEST` will be regenerated without + :command:`sdist` comparing its modification time to the one of + :file:`MANIFEST.in` or :file:`setup.py`. + +.. versionchanged:: 3.1.3 :file:`MANIFEST` files start with a comment indicating they are generated. Files without this comment are not overwritten or removed. +.. versionchanged:: 3.2.2 + :command:`sdist` will read a :file:`MANIFEST` file if no :file:`MANIFEST.in` + exists, like it used to do. + + The manifest template has one command per line, where each command specifies a set of files to include or exclude from the source distribution. 
For an example, again we turn to the Distutils' own manifest template:: @@ -185,8 +195,12 @@ The normal course of operations for the :command:`sdist` command is as follows: -* if the manifest file, :file:`MANIFEST` doesn't exist, read :file:`MANIFEST.in` - and create the manifest +* if the manifest file (:file:`MANIFEST` by default) exists and the first line + does not have a comment indicating it is generated from :file:`MANIFEST.in`, + then it is used as is, unaltered + +* if the manifest file doesn't exist or has been previously automatically + generated, read :file:`MANIFEST.in` and create the manifest * if neither :file:`MANIFEST` nor :file:`MANIFEST.in` exist, create a manifest with just the default file set @@ -204,8 +218,3 @@ python setup.py sdist --manifest-only :option:`-o` is a shortcut for :option:`--manifest-only`. - -.. versionchanged:: 3.1 - An existing generated :file:`MANIFEST` will be regenerated without - :command:`sdist` comparing its modification time to the one of - :file:`MANIFEST.in` or :file:`setup.py`. diff --git a/Lib/distutils/command/sdist.py b/Lib/distutils/command/sdist.py --- a/Lib/distutils/command/sdist.py +++ b/Lib/distutils/command/sdist.py @@ -174,14 +174,20 @@ reading the manifest, or just using the default file set -- it all depends on the user's options. """ - # new behavior: + # new behavior when using a template: # the file list is recalculated everytime because # even if MANIFEST.in or setup.py are not changed # the user might have added some files in the tree that # need to be included. # - # This makes --force the default and only behavior. + # This makes --force the default and only behavior with templates. 
template_exists = os.path.isfile(self.template) + if not template_exists and self._manifest_is_not_generated(): + self.read_manifest() + self.filelist.sort() + self.filelist.remove_duplicates() + return + if not template_exists: self.warn(("manifest template '%s' does not exist " + "(using default file list)") % @@ -336,23 +342,28 @@ by 'add_defaults()' and 'read_template()') to the manifest file named by 'self.manifest'. """ - if os.path.isfile(self.manifest): - fp = open(self.manifest) - try: - first_line = fp.readline() - finally: - fp.close() - - if first_line != '# file GENERATED by distutils, do NOT edit\n': - log.info("not writing to manually maintained " - "manifest file '%s'" % self.manifest) - return + if self._manifest_is_not_generated(): + log.info("not writing to manually maintained " + "manifest file '%s'" % self.manifest) + return content = self.filelist.files[:] content.insert(0, '# file GENERATED by distutils, do NOT edit') self.execute(file_util.write_file, (self.manifest, content), "writing manifest file '%s'" % self.manifest) + def _manifest_is_not_generated(self): + # check for special comment used in 3.1.3 and higher + if not os.path.isfile(self.manifest): + return False + + fp = open(self.manifest) + try: + first_line = fp.readline() + finally: + fp.close() + return first_line != '# file GENERATED by distutils, do NOT edit\n' + def read_manifest(self): """Read the manifest file (named by 'self.manifest') and use it to fill in 'self.filelist', the list of files to include in the source @@ -360,12 +371,11 @@ """ log.info("reading manifest file '%s'", self.manifest) manifest = open(self.manifest) - while True: - line = manifest.readline() - if line == '': # end of file - break - if line[-1] == '\n': - line = line[0:-1] + for line in manifest: + # ignore comments and blank lines + line = line.strip() + if line.startswith('#') or not line: + continue self.filelist.append(line) manifest.close() diff --git a/Lib/distutils/tests/test_sdist.py 
b/Lib/distutils/tests/test_sdist.py --- a/Lib/distutils/tests/test_sdist.py +++ b/Lib/distutils/tests/test_sdist.py @@ -1,21 +1,19 @@ """Tests for distutils.command.sdist.""" import os +import tarfile import unittest -import shutil +import warnings import zipfile from os.path import join -import sys -import tempfile -import warnings +from textwrap import dedent from test.support import captured_stdout, check_warnings, run_unittest from distutils.command.sdist import sdist, show_formats from distutils.core import Distribution from distutils.tests.test_config import PyPIRCCommandTestCase -from distutils.errors import DistutilsExecError, DistutilsOptionError +from distutils.errors import DistutilsOptionError from distutils.spawn import find_executable -from distutils.tests import support from distutils.log import WARN from distutils.archive_util import ARCHIVE_FORMATS @@ -346,13 +344,33 @@ self.assertEqual(manifest[0], '# file GENERATED by distutils, do NOT edit') + @unittest.skipUnless(ZLIB_SUPPORT, "Need zlib support to run") + def test_manifest_comments(self): + # make sure comments don't cause exceptions or wrong includes + contents = dedent("""\ + # bad.py + #bad.py + good.py + """) + dist, cmd = self.get_cmd() + cmd.ensure_finalized() + self.write_file((self.tmp_dir, cmd.manifest), contents) + self.write_file((self.tmp_dir, 'good.py'), '# pick me!') + self.write_file((self.tmp_dir, 'bad.py'), "# don't pick me!") + self.write_file((self.tmp_dir, '#bad.py'), "# don't pick me!") + cmd.run() + self.assertEqual(cmd.filelist.files, ['good.py']) + @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') def test_manual_manifest(self): # check that a MANIFEST without a marker is left alone dist, cmd = self.get_cmd() cmd.ensure_finalized() self.write_file((self.tmp_dir, cmd.manifest), 'README.manual') + self.write_file((self.tmp_dir, 'README.manual'), + 'This project maintains its MANIFEST file itself.') cmd.run() + self.assertEqual(cmd.filelist.files, 
['README.manual']) f = open(cmd.manifest) try: @@ -363,6 +381,15 @@ self.assertEqual(manifest, ['README.manual']) + archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') + archive = tarfile.open(archive_name) + try: + filenames = [tarinfo.name for tarinfo in archive] + finally: + archive.close() + self.assertEqual(sorted(filenames), ['fake-1.0', 'fake-1.0/PKG-INFO', + 'fake-1.0/README.manual']) + def test_suite(): return unittest.makeSuite(SDistTestCase) diff --git a/Lib/lib2to3/tests/test_refactor.py b/Lib/lib2to3/tests/test_refactor.py --- a/Lib/lib2to3/tests/test_refactor.py +++ b/Lib/lib2to3/tests/test_refactor.py @@ -177,22 +177,26 @@ self.assertEqual(results, expected) def check_file_refactoring(self, test_file, fixers=_2TO3_FIXERS): + tmpdir = tempfile.mkdtemp(prefix="2to3-test_refactor") + self.addCleanup(shutil.rmtree, tmpdir) + # make a copy of the tested file that we can write to + shutil.copy(test_file, tmpdir) + test_file = os.path.join(tmpdir, os.path.basename(test_file)) + os.chmod(test_file, 0o644) + def read_file(): with open(test_file, "rb") as fp: return fp.read() + old_contents = read_file() rt = self.rt(fixers=fixers) rt.refactor_file(test_file) self.assertEqual(old_contents, read_file()) - try: - rt.refactor_file(test_file, True) - new_contents = read_file() - self.assertNotEqual(old_contents, new_contents) - finally: - with open(test_file, "wb") as fp: - fp.write(old_contents) + rt.refactor_file(test_file, True) + new_contents = read_file() + self.assertNotEqual(old_contents, new_contents) return new_contents def test_refactor_file(self): diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -215,6 +215,7 @@ Vincent Delft Arnaud Delobelle Erik Demaine +John Dennis Roger Dev Raghuram Devarakonda Caleb Deveraux @@ -875,6 +876,7 @@ Tobias Thelen James Thomas Robin Thomas +Stephen Thorne Jeremy Thurgood Eric Tiedemann July Tikhonov diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -41,6 +41,9 @@ Library 
------- +- Issues #11104, #8688: Fix the behavior of distutils' sdist command with + manually-maintained MANIFEST files. + - Issue #12464: tempfile.TemporaryDirectory.cleanup() should not follow symlinks: fix it. Patch by Petri Lehtinen. @@ -137,6 +140,9 @@ Tests ----- +- Issue #12331: The test suite for lib2to3 can now run from an installed + Python. + - Issue #12626: In regrtest, allow to filter tests using a glob filter with the ``-m`` (or ``--match``) option. This works with all test cases using the unittest module. This is useful with long test suites diff --git a/Tools/scripts/patchcheck.py b/Tools/scripts/patchcheck.py --- a/Tools/scripts/patchcheck.py +++ b/Tools/scripts/patchcheck.py @@ -4,11 +4,15 @@ import shutil import os.path import subprocess +import sysconfig import reindent import untabify +SRCDIR = sysconfig.get_config_var('srcdir') + + def n_files_str(count): """Return 'N file(s)' with the proper plurality on 'file'.""" return "{} file{}".format(count, "s" if count != 1 else "") @@ -36,7 +40,7 @@ info=lambda x: n_files_str(len(x))) def changed_files(): """Get the list of changed or added files from the VCS.""" - if os.path.isdir('.hg'): + if os.path.isdir(os.path.join(SRCDIR, '.hg')): vcs = 'hg' cmd = 'hg status --added --modified --no-status' elif os.path.isdir('.svn'): @@ -75,7 +79,7 @@ reindent.makebackup = False # No need to create backups. 
fixed = [] for path in (x for x in file_paths if x.endswith('.py')): - if reindent.check(path): + if reindent.check(os.path.join(SRCDIR, path)): fixed.append(path) return fixed @@ -85,10 +89,11 @@ """Report if any C files """ fixed = [] for path in file_paths: - with open(path, 'r') as f: + abspath = os.path.join(SRCDIR, path) + with open(abspath, 'r') as f: if '\t' not in f.read(): continue - untabify.process(path, 8, verbose=False) + untabify.process(abspath, 8, verbose=False) fixed.append(path) return fixed @@ -99,13 +104,14 @@ def normalize_docs_whitespace(file_paths): fixed = [] for path in file_paths: + abspath = os.path.join(SRCDIR, path) try: - with open(path, 'rb') as f: + with open(abspath, 'rb') as f: lines = f.readlines() new_lines = [ws_re.sub(br'\1', line) for line in lines] if new_lines != lines: - shutil.copyfile(path, path + '.bak') - with open(path, 'wb') as f: + shutil.copyfile(abspath, abspath + '.bak') + with open(abspath, 'wb') as f: f.writelines(new_lines) fixed.append(path) except Exception as err: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 14:45:28 2011 From: python-checkins at python.org (eric.araujo) Date: Mon, 01 Aug 2011 14:45:28 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_Branch_merge?= Message-ID: http://hg.python.org/cpython/rev/abd8380c15e2 changeset: 71670:abd8380c15e2 parent: 71662:2d69900c0820 parent: 71668:7ee8f413188e user: ?ric Araujo date: Mon Aug 01 14:44:17 2011 +0200 summary: Branch merge files: Doc/distutils/sourcedist.rst | 25 +++++-- Lib/distutils/command/sdist.py | 48 ++++++++----- Lib/distutils/tests/test_sdist.py | 39 +++++++++- Lib/lib2to3/tests/test_refactor.py | 18 +++-- Lib/packaging/tests/test_database.py | 54 +++++++++------ Misc/ACKS | 2 + Misc/NEWS | 9 ++ Tools/scripts/patchcheck.py | 31 ++++---- 8 files changed, 148 insertions(+), 78 deletions(-) diff --git a/Doc/distutils/sourcedist.rst 
b/Doc/distutils/sourcedist.rst --- a/Doc/distutils/sourcedist.rst +++ b/Doc/distutils/sourcedist.rst @@ -103,10 +103,20 @@ :file:`MANIFEST`, you must specify everything: the default set of files described above does not apply in this case. -.. versionadded:: 3.1 +.. versionchanged:: 3.1 + An existing generated :file:`MANIFEST` will be regenerated without + :command:`sdist` comparing its modification time to the one of + :file:`MANIFEST.in` or :file:`setup.py`. + +.. versionchanged:: 3.1.3 :file:`MANIFEST` files start with a comment indicating they are generated. Files without this comment are not overwritten or removed. +.. versionchanged:: 3.2.2 + :command:`sdist` will read a :file:`MANIFEST` file if no :file:`MANIFEST.in` + exists, like it used to do. + + The manifest template has one command per line, where each command specifies a set of files to include or exclude from the source distribution. For an example, again we turn to the Distutils' own manifest template:: @@ -185,8 +195,12 @@ The normal course of operations for the :command:`sdist` command is as follows: -* if the manifest file, :file:`MANIFEST` doesn't exist, read :file:`MANIFEST.in` - and create the manifest +* if the manifest file (:file:`MANIFEST` by default) exists and the first line + does not have a comment indicating it is generated from :file:`MANIFEST.in`, + then it is used as is, unaltered + +* if the manifest file doesn't exist or has been previously automatically + generated, read :file:`MANIFEST.in` and create the manifest * if neither :file:`MANIFEST` nor :file:`MANIFEST.in` exist, create a manifest with just the default file set @@ -204,8 +218,3 @@ python setup.py sdist --manifest-only :option:`-o` is a shortcut for :option:`--manifest-only`. - -.. versionchanged:: 3.1 - An existing generated :file:`MANIFEST` will be regenerated without - :command:`sdist` comparing its modification time to the one of - :file:`MANIFEST.in` or :file:`setup.py`. 
diff --git a/Lib/distutils/command/sdist.py b/Lib/distutils/command/sdist.py --- a/Lib/distutils/command/sdist.py +++ b/Lib/distutils/command/sdist.py @@ -174,14 +174,20 @@ reading the manifest, or just using the default file set -- it all depends on the user's options. """ - # new behavior: + # new behavior when using a template: # the file list is recalculated everytime because # even if MANIFEST.in or setup.py are not changed # the user might have added some files in the tree that # need to be included. # - # This makes --force the default and only behavior. + # This makes --force the default and only behavior with templates. template_exists = os.path.isfile(self.template) + if not template_exists and self._manifest_is_not_generated(): + self.read_manifest() + self.filelist.sort() + self.filelist.remove_duplicates() + return + if not template_exists: self.warn(("manifest template '%s' does not exist " + "(using default file list)") % @@ -336,23 +342,28 @@ by 'add_defaults()' and 'read_template()') to the manifest file named by 'self.manifest'. 
""" - if os.path.isfile(self.manifest): - fp = open(self.manifest) - try: - first_line = fp.readline() - finally: - fp.close() - - if first_line != '# file GENERATED by distutils, do NOT edit\n': - log.info("not writing to manually maintained " - "manifest file '%s'" % self.manifest) - return + if self._manifest_is_not_generated(): + log.info("not writing to manually maintained " + "manifest file '%s'" % self.manifest) + return content = self.filelist.files[:] content.insert(0, '# file GENERATED by distutils, do NOT edit') self.execute(file_util.write_file, (self.manifest, content), "writing manifest file '%s'" % self.manifest) + def _manifest_is_not_generated(self): + # check for special comment used in 3.1.3 and higher + if not os.path.isfile(self.manifest): + return False + + fp = open(self.manifest) + try: + first_line = fp.readline() + finally: + fp.close() + return first_line != '# file GENERATED by distutils, do NOT edit\n' + def read_manifest(self): """Read the manifest file (named by 'self.manifest') and use it to fill in 'self.filelist', the list of files to include in the source @@ -360,12 +371,11 @@ """ log.info("reading manifest file '%s'", self.manifest) manifest = open(self.manifest) - while True: - line = manifest.readline() - if line == '': # end of file - break - if line[-1] == '\n': - line = line[0:-1] + for line in manifest: + # ignore comments and blank lines + line = line.strip() + if line.startswith('#') or not line: + continue self.filelist.append(line) manifest.close() diff --git a/Lib/distutils/tests/test_sdist.py b/Lib/distutils/tests/test_sdist.py --- a/Lib/distutils/tests/test_sdist.py +++ b/Lib/distutils/tests/test_sdist.py @@ -1,21 +1,19 @@ """Tests for distutils.command.sdist.""" import os +import tarfile import unittest -import shutil +import warnings import zipfile from os.path import join -import sys -import tempfile -import warnings +from textwrap import dedent from test.support import captured_stdout, check_warnings, 
run_unittest from distutils.command.sdist import sdist, show_formats from distutils.core import Distribution from distutils.tests.test_config import PyPIRCCommandTestCase -from distutils.errors import DistutilsExecError, DistutilsOptionError +from distutils.errors import DistutilsOptionError from distutils.spawn import find_executable -from distutils.tests import support from distutils.log import WARN from distutils.archive_util import ARCHIVE_FORMATS @@ -346,13 +344,33 @@ self.assertEqual(manifest[0], '# file GENERATED by distutils, do NOT edit') + @unittest.skipUnless(ZLIB_SUPPORT, "Need zlib support to run") + def test_manifest_comments(self): + # make sure comments don't cause exceptions or wrong includes + contents = dedent("""\ + # bad.py + #bad.py + good.py + """) + dist, cmd = self.get_cmd() + cmd.ensure_finalized() + self.write_file((self.tmp_dir, cmd.manifest), contents) + self.write_file((self.tmp_dir, 'good.py'), '# pick me!') + self.write_file((self.tmp_dir, 'bad.py'), "# don't pick me!") + self.write_file((self.tmp_dir, '#bad.py'), "# don't pick me!") + cmd.run() + self.assertEqual(cmd.filelist.files, ['good.py']) + @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') def test_manual_manifest(self): # check that a MANIFEST without a marker is left alone dist, cmd = self.get_cmd() cmd.ensure_finalized() self.write_file((self.tmp_dir, cmd.manifest), 'README.manual') + self.write_file((self.tmp_dir, 'README.manual'), + 'This project maintains its MANIFEST file itself.') cmd.run() + self.assertEqual(cmd.filelist.files, ['README.manual']) f = open(cmd.manifest) try: @@ -363,6 +381,15 @@ self.assertEqual(manifest, ['README.manual']) + archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') + archive = tarfile.open(archive_name) + try: + filenames = [tarinfo.name for tarinfo in archive] + finally: + archive.close() + self.assertEqual(sorted(filenames), ['fake-1.0', 'fake-1.0/PKG-INFO', + 'fake-1.0/README.manual']) + def test_suite(): return 
unittest.makeSuite(SDistTestCase) diff --git a/Lib/lib2to3/tests/test_refactor.py b/Lib/lib2to3/tests/test_refactor.py --- a/Lib/lib2to3/tests/test_refactor.py +++ b/Lib/lib2to3/tests/test_refactor.py @@ -177,22 +177,26 @@ self.assertEqual(results, expected) def check_file_refactoring(self, test_file, fixers=_2TO3_FIXERS): + tmpdir = tempfile.mkdtemp(prefix="2to3-test_refactor") + self.addCleanup(shutil.rmtree, tmpdir) + # make a copy of the tested file that we can write to + shutil.copy(test_file, tmpdir) + test_file = os.path.join(tmpdir, os.path.basename(test_file)) + os.chmod(test_file, 0o644) + def read_file(): with open(test_file, "rb") as fp: return fp.read() + old_contents = read_file() rt = self.rt(fixers=fixers) rt.refactor_file(test_file) self.assertEqual(old_contents, read_file()) - try: - rt.refactor_file(test_file, True) - new_contents = read_file() - self.assertNotEqual(old_contents, new_contents) - finally: - with open(test_file, "wb") as fp: - fp.write(old_contents) + rt.refactor_file(test_file, True) + new_contents = read_file() + self.assertNotEqual(old_contents, new_contents) return new_contents def test_refactor_file(self): diff --git a/Lib/packaging/tests/test_database.py b/Lib/packaging/tests/test_database.py --- a/Lib/packaging/tests/test_database.py +++ b/Lib/packaging/tests/test_database.py @@ -39,20 +39,40 @@ return [path, digest, size] -class CommonDistributionTests: +class FakeDistsMixin: + + def setUp(self): + super(FakeDistsMixin, self).setUp() + self.addCleanup(enable_cache) + disable_cache() + + # make a copy that we can write into for our fake installed + # distributions + tmpdir = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, tmpdir) + self.fake_dists_path = os.path.join(tmpdir, 'fake_dists') + fake_dists_src = os.path.abspath( + os.path.join(os.path.dirname(__file__), 'fake_dists')) + shutil.copytree(fake_dists_src, self.fake_dists_path) + # XXX ugly workaround: revert copystat calls done by shutil behind our + # back (to 
avoid getting a read-only copy of a read-only file). we + # could pass a custom copy_function to change the mode of files, but + # shutil gives no control over the mode of directories :( + for root, dirs, files in os.walk(self.fake_dists_path): + os.chmod(root, 0o755) + for f in files: + os.chmod(os.path.join(root, f), 0o644) + for d in dirs: + os.chmod(os.path.join(root, d), 0o755) + + +class CommonDistributionTests(FakeDistsMixin): """Mixin used to test the interface common to both Distribution classes. Derived classes define cls, sample_dist, dirs and records. These attributes are used in test methods. See source code for details. """ - def setUp(self): - super(CommonDistributionTests, self).setUp() - self.addCleanup(enable_cache) - disable_cache() - self.fake_dists_path = os.path.abspath( - os.path.join(os.path.dirname(__file__), 'fake_dists')) - def test_instantiation(self): # check that useful attributes are here name, version, distdir = self.sample_dist @@ -110,6 +130,7 @@ self.records = {} for distinfo_dir in self.dirs: + record_file = os.path.join(distinfo_dir, 'RECORD') with open(record_file, 'w') as file: record_writer = csv.writer( @@ -138,12 +159,6 @@ record_data[path] = md5_, size self.records[distinfo_dir] = record_data - def tearDown(self): - for distinfo_dir in self.dirs: - record_file = os.path.join(distinfo_dir, 'RECORD') - open(record_file, 'wb').close() - super(TestDistribution, self).tearDown() - def test_instantiation(self): super(TestDistribution, self).test_instantiation() self.assertIsInstance(self.dist.requested, bool) @@ -252,20 +267,13 @@ class TestDatabase(support.LoggingCatcher, + FakeDistsMixin, unittest.TestCase): def setUp(self): super(TestDatabase, self).setUp() - disable_cache() - # Setup the path environment with our fake distributions - current_path = os.path.abspath(os.path.dirname(__file__)) - self.fake_dists_path = os.path.join(current_path, 'fake_dists') sys.path.insert(0, self.fake_dists_path) - - def tearDown(self): - 
sys.path.remove(self.fake_dists_path) - enable_cache() - super(TestDatabase, self).tearDown() + self.addCleanup(sys.path.remove, self.fake_dists_path) def test_distinfo_dirname(self): # Given a name and a version, we expect the distinfo_dirname function diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -228,6 +228,7 @@ Arnaud Delobelle Konrad Delong Erik Demaine +John Dennis Roger Dev Raghuram Devarakonda Caleb Deveraux @@ -931,6 +932,7 @@ Tobias Thelen James Thomas Robin Thomas +Stephen Thorne Jeremy Thurgood Eric Tiedemann July Tikhonov diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -249,6 +249,9 @@ Library ------- +- Issues #11104, #8688: Fix the behavior of distutils' sdist command with + manually-maintained MANIFEST files. + - Issue #11281: smtplib.STMP gets source_address parameter, which adds the ability to bind to specific source address on a machine with multiple interfaces. Patch by Paulo Scardine. @@ -1144,6 +1147,12 @@ Tests ----- +- Issue #12331: The test suite for the packaging module can now run from an + installed Python. + +- Issue #12331: The test suite for lib2to3 can now run from an installed + Python. + - Issue #12626: In regrtest, allow to filter tests using a glob filter with the ``-m`` (or ``--match``) option. This works with all test cases using the unittest module. 
This is useful with long test suites diff --git a/Tools/scripts/patchcheck.py b/Tools/scripts/patchcheck.py --- a/Tools/scripts/patchcheck.py +++ b/Tools/scripts/patchcheck.py @@ -4,11 +4,15 @@ import shutil import os.path import subprocess +import sysconfig import reindent import untabify +SRCDIR = sysconfig.get_config_var('srcdir') + + def n_files_str(count): """Return 'N file(s)' with the proper plurality on 'file'.""" return "{} file{}".format(count, "s" if count != 1 else "") @@ -35,18 +39,13 @@ @status("Getting the list of files that have been added/changed", info=lambda x: n_files_str(len(x))) def changed_files(): - """Get the list of changed or added files from the VCS.""" - if os.path.isdir('.hg'): - cmd = 'hg status --added --modified --no-status' - else: + """Get the list of changed or added files from Mercurial.""" + if not os.path.isdir(os.path.join(SRCDIR, '.hg')): sys.exit('need a checkout to get modified files') - st = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE) - try: - st.wait() + cmd = 'hg status --added --modified --no-status' + with subprocess.Popen(cmd.split(), stdout=subprocess.PIPE) as st: return [x.decode().rstrip() for x in st.stdout] - finally: - st.stdout.close() def report_modified_files(file_paths): @@ -65,7 +64,7 @@ """Make sure that the whitespace for .py files have been normalized.""" reindent.makebackup = False # No need to create backups. 
fixed = [path for path in file_paths if path.endswith('.py') and - reindent.check(path)] + reindent.check(os.path.join(SRCDIR, path))] return fixed @@ -74,10 +73,11 @@ """Report if any C files """ fixed = [] for path in file_paths: - with open(path, 'r') as f: + abspath = os.path.join(SRCDIR, path) + with open(abspath, 'r') as f: if '\t' not in f.read(): continue - untabify.process(path, 8, verbose=False) + untabify.process(abspath, 8, verbose=False) fixed.append(path) return fixed @@ -88,13 +88,14 @@ def normalize_docs_whitespace(file_paths): fixed = [] for path in file_paths: + abspath = os.path.join(SRCDIR, path) try: - with open(path, 'rb') as f: + with open(abspath, 'rb') as f: lines = f.readlines() new_lines = [ws_re.sub(br'\1', line) for line in lines] if new_lines != lines: - shutil.copyfile(path, path + '.bak') - with open(path, 'wb') as f: + shutil.copyfile(abspath, abspath + '.bak') + with open(abspath, 'wb') as f: f.writelines(new_lines) fixed.append(path) except Exception as err: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 14:45:29 2011 From: python-checkins at python.org (eric.araujo) Date: Mon, 01 Aug 2011 14:45:29 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/7e9bfd0395d1 changeset: 71671:7e9bfd0395d1 parent: 71670:abd8380c15e2 parent: 71669:da79c5d0ed17 user: ?ric Araujo date: Mon Aug 01 14:45:01 2011 +0200 summary: Merge 3.2 files: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 14:49:56 2011 From: python-checkins at python.org (eric.araujo) Date: Mon, 01 Aug 2011 14:49:56 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogTGV0IOKAnG1ha2Ug?= =?utf8?q?patchcheck=E2=80=9D_work_for_out-of-dir_builds_=28=239860=29?= Message-ID: http://hg.python.org/cpython/rev/bea11ce24bb0 changeset: 71672:bea11ce24bb0 branch: 2.7 parent: 71628:5a248fcfa112 
user: ?ric Araujo date: Sat Jul 30 21:34:04 2011 +0200 summary: Let ?make patchcheck? work for out-of-dir builds (#9860) files: Tools/scripts/patchcheck.py | 20 +++++++++++++------- 1 files changed, 13 insertions(+), 7 deletions(-) diff --git a/Tools/scripts/patchcheck.py b/Tools/scripts/patchcheck.py --- a/Tools/scripts/patchcheck.py +++ b/Tools/scripts/patchcheck.py @@ -4,11 +4,15 @@ import shutil import os.path import subprocess +import sysconfig import reindent import untabify +SRCDIR = sysconfig.get_config_var('srcdir') + + def n_files_str(count): """Return 'N file(s)' with the proper plurality on 'file'.""" return "{} file{}".format(count, "s" if count != 1 else "") @@ -36,7 +40,7 @@ info=lambda x: n_files_str(len(x))) def changed_files(): """Get the list of changed or added files from the VCS.""" - if os.path.isdir('.hg'): + if os.path.isdir(os.path.join(SRCDIR, '.hg')): vcs = 'hg' cmd = 'hg status --added --modified --no-status' elif os.path.isdir('.svn'): @@ -75,7 +79,7 @@ reindent.makebackup = False # No need to create backups. 
fixed = [] for path in (x for x in file_paths if x.endswith('.py')): - if reindent.check(path): + if reindent.check(os.path.join(SRCDIR, path)): fixed.append(path) return fixed @@ -85,10 +89,11 @@ """Report if any C files """ fixed = [] for path in file_paths: - with open(path, 'r') as f: + abspath = os.path.join(SRCDIR, path) + with open(abspath, 'r') as f: if '\t' not in f.read(): continue - untabify.process(path, 8, verbose=False) + untabify.process(abspath, 8, verbose=False) fixed.append(path) return fixed @@ -99,13 +104,14 @@ def normalize_docs_whitespace(file_paths): fixed = [] for path in file_paths: + abspath = os.path.join(SRCDIR, path) try: - with open(path, 'rb') as f: + with open(abspath, 'rb') as f: lines = f.readlines() new_lines = [ws_re.sub(br'\1', line) for line in lines] if new_lines != lines: - shutil.copyfile(path, path + '.bak') - with open(path, 'wb') as f: + shutil.copyfile(abspath, abspath + '.bak') + with open(abspath, 'wb') as f: f.writelines(new_lines) fixed.append(path) except Exception as err: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 14:49:57 2011 From: python-checkins at python.org (eric.araujo) Date: Mon, 01 Aug 2011 14:49:57 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Fix_regression_?= =?utf8?q?with_distutils_MANIFEST_handing_=28=2311104=2C_=238688=29=2E?= Message-ID: http://hg.python.org/cpython/rev/21feea7f35e5 changeset: 71673:21feea7f35e5 branch: 2.7 user: ?ric Araujo date: Sun Jul 31 02:04:00 2011 +0200 summary: Fix regression with distutils MANIFEST handing (#11104, #8688). The changed behavior of sdist in 2.7 broke packaging for projects that wanted to use a manually-maintained MANIFEST file (instead of having a MANIFEST.in template and letting distutils generate the MANIFEST). 
The fixes that were committed for #8688 (d29399100973 by Tarek and f7639dcdffc3 by me) did not fix all issues exposed in the bug report, and also added one problem: the MANIFEST file format gained comments, but the read_manifest method was not updated to handle (i.e. ignore) them. This changeset should fix everything; the tests have been expanded and I successfully tested with Mercurial, which suffered from this regression. I have grouped the versionchanged directives for these bugs in one place and added micro version numbers to help users know the quirks of the exact version they?re using. I also removed a stanza in the docs that was forgotten in Tarek?s first changeset. Initial report, thorough diagnosis and patch by John Dennis, further work on the patch by Stephen Thorne, and a few edits and additions by me. files: Doc/distutils/sourcedist.rst | 29 ++++++---- Lib/distutils/command/sdist.py | 48 +++++++++++------- Lib/distutils/tests/test_sdist.py | 43 +++++++++++++--- Misc/ACKS | 2 + Misc/NEWS | 3 + 5 files changed, 85 insertions(+), 40 deletions(-) diff --git a/Doc/distutils/sourcedist.rst b/Doc/distutils/sourcedist.rst --- a/Doc/distutils/sourcedist.rst +++ b/Doc/distutils/sourcedist.rst @@ -111,12 +111,22 @@ :file:`MANIFEST`, you must specify everything: the default set of files described above does not apply in this case. -.. versionadded:: 2.7 +.. versionchanged:: 2.7 + An existing generated :file:`MANIFEST` will be regenerated without + :command:`sdist` comparing its modification time to the one of + :file:`MANIFEST.in` or :file:`setup.py`. + +.. versionchanged:: 2.7.1 :file:`MANIFEST` files start with a comment indicating they are generated. Files without this comment are not overwritten or removed. +.. versionchanged:: 2.7.3 + :command:`sdist` will read a :file:`MANIFEST` file if no :file:`MANIFEST.in` + exists, like it did before 2.7. + See :ref:`manifest_template` section for a syntax reference. + .. 
_manifest-options: Manifest-related options @@ -124,16 +134,16 @@ The normal course of operations for the :command:`sdist` command is as follows: -* if the manifest file, :file:`MANIFEST` doesn't exist, read :file:`MANIFEST.in` - and create the manifest +* if the manifest file (:file:`MANIFEST` by default) exists and the first line + does not have a comment indicating it is generated from :file:`MANIFEST.in`, + then it is used as is, unaltered + +* if the manifest file doesn't exist or has been previously automatically + generated, read :file:`MANIFEST.in` and create the manifest * if neither :file:`MANIFEST` nor :file:`MANIFEST.in` exist, create a manifest with just the default file set -* if either :file:`MANIFEST.in` or the setup script (:file:`setup.py`) are more - recent than :file:`MANIFEST`, recreate :file:`MANIFEST` by reading - :file:`MANIFEST.in` - * use the list of files now in :file:`MANIFEST` (either just generated or read in) to create the source distribution archive(s) @@ -271,8 +281,3 @@ ``a-z``, ``a-zA-Z``, ``a-f0-9_.``). The definition of "regular filename character" is platform-specific: on Unix it is anything except slash; on Windows anything except backslash or colon. - -.. versionchanged:: 2.7 - An existing generated :file:`MANIFEST` will be regenerated without - :command:`sdist` comparing its modification time to the one of - :file:`MANIFEST.in` or :file:`setup.py`. diff --git a/Lib/distutils/command/sdist.py b/Lib/distutils/command/sdist.py --- a/Lib/distutils/command/sdist.py +++ b/Lib/distutils/command/sdist.py @@ -182,14 +182,20 @@ reading the manifest, or just using the default file set -- it all depends on the user's options. """ - # new behavior: + # new behavior when using a template: # the file list is recalculated everytime because # even if MANIFEST.in or setup.py are not changed # the user might have added some files in the tree that # need to be included. # - # This makes --force the default and only behavior. 
+ # This makes --force the default and only behavior with templates. template_exists = os.path.isfile(self.template) + if not template_exists and self._manifest_is_not_generated(): + self.read_manifest() + self.filelist.sort() + self.filelist.remove_duplicates() + return + if not template_exists: self.warn(("manifest template '%s' does not exist " + "(using default file list)") % @@ -352,23 +358,28 @@ by 'add_defaults()' and 'read_template()') to the manifest file named by 'self.manifest'. """ - if os.path.isfile(self.manifest): - fp = open(self.manifest) - try: - first_line = fp.readline() - finally: - fp.close() - - if first_line != '# file GENERATED by distutils, do NOT edit\n': - log.info("not writing to manually maintained " - "manifest file '%s'" % self.manifest) - return + if self._manifest_is_not_generated(): + log.info("not writing to manually maintained " + "manifest file '%s'" % self.manifest) + return content = self.filelist.files[:] content.insert(0, '# file GENERATED by distutils, do NOT edit') self.execute(file_util.write_file, (self.manifest, content), "writing manifest file '%s'" % self.manifest) + def _manifest_is_not_generated(self): + # check for special comment used in 2.7.1 and higher + if not os.path.isfile(self.manifest): + return False + + fp = open(self.manifest, 'rU') + try: + first_line = fp.readline() + finally: + fp.close() + return first_line != '# file GENERATED by distutils, do NOT edit\n' + def read_manifest(self): """Read the manifest file (named by 'self.manifest') and use it to fill in 'self.filelist', the list of files to include in the source @@ -376,12 +387,11 @@ """ log.info("reading manifest file '%s'", self.manifest) manifest = open(self.manifest) - while 1: - line = manifest.readline() - if line == '': # end of file - break - if line[-1] == '\n': - line = line[0:-1] + for line in manifest: + # ignore comments and blank lines + line = line.strip() + if line.startswith('#') or not line: + continue self.filelist.append(line) 
manifest.close() diff --git a/Lib/distutils/tests/test_sdist.py b/Lib/distutils/tests/test_sdist.py --- a/Lib/distutils/tests/test_sdist.py +++ b/Lib/distutils/tests/test_sdist.py @@ -1,9 +1,11 @@ """Tests for distutils.command.sdist.""" import os +import tarfile import unittest -import shutil +import warnings import zipfile -import tarfile +from os.path import join +from textwrap import dedent # zlib is not used here, but if it's not available # the tests that use zipfile may fail @@ -19,19 +21,13 @@ except ImportError: UID_GID_SUPPORT = False -from os.path import join -import sys -import tempfile -import warnings - from test.test_support import captured_stdout, check_warnings, run_unittest from distutils.command.sdist import sdist, show_formats from distutils.core import Distribution from distutils.tests.test_config import PyPIRCCommandTestCase -from distutils.errors import DistutilsExecError, DistutilsOptionError +from distutils.errors import DistutilsOptionError from distutils.spawn import find_executable -from distutils.tests import support from distutils.log import WARN from distutils.archive_util import ARCHIVE_FORMATS @@ -405,13 +401,33 @@ self.assertEqual(manifest[0], '# file GENERATED by distutils, do NOT edit') + @unittest.skipUnless(zlib, 'requires zlib') + def test_manifest_comments(self): + # make sure comments don't cause exceptions or wrong includes + contents = dedent("""\ + # bad.py + #bad.py + good.py + """) + dist, cmd = self.get_cmd() + cmd.ensure_finalized() + self.write_file((self.tmp_dir, cmd.manifest), contents) + self.write_file((self.tmp_dir, 'good.py'), '# pick me!') + self.write_file((self.tmp_dir, 'bad.py'), "# don't pick me!") + self.write_file((self.tmp_dir, '#bad.py'), "# don't pick me!") + cmd.run() + self.assertEqual(cmd.filelist.files, ['good.py']) + @unittest.skipUnless(zlib, "requires zlib") def test_manual_manifest(self): # check that a MANIFEST without a marker is left alone dist, cmd = self.get_cmd() cmd.ensure_finalized() 
self.write_file((self.tmp_dir, cmd.manifest), 'README.manual') + self.write_file((self.tmp_dir, 'README.manual'), + 'This project maintains its MANIFEST file itself.') cmd.run() + self.assertEqual(cmd.filelist.files, ['README.manual']) f = open(cmd.manifest) try: @@ -422,6 +438,15 @@ self.assertEqual(manifest, ['README.manual']) + archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') + archive = tarfile.open(archive_name) + try: + filenames = [tarinfo.name for tarinfo in archive] + finally: + archive.close() + self.assertEqual(sorted(filenames), ['fake-1.0', 'fake-1.0/PKG-INFO', + 'fake-1.0/README.manual']) + def test_suite(): return unittest.makeSuite(SDistTestCase) diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -194,6 +194,7 @@ Vincent Delft Arnaud Delobelle Erik Demaine +John Dennis Roger Dev Raghuram Devarakonda Catherine Devlin @@ -813,6 +814,7 @@ Tobias Thelen James Thomas Robin Thomas +Stephen Thorne Eric Tiedemann Tracy Tims Oren Tirosh diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -37,6 +37,9 @@ Library ------- +- Issues #11104, #8688: Fix the behavior of distutils' sdist command with + manually-maintained MANIFEST files. + - Issue #8887: "pydoc somebuiltin.somemethod" (or help('somebuiltin.somemethod') in Python code) now finds the doc of the method. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 14:49:58 2011 From: python-checkins at python.org (eric.araujo) Date: Mon, 01 Aug 2011 14:49:58 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Stop_trying_to_?= =?utf8?q?write_into_the_stdlib_during_lib2to3_tests_=28=2312331=29=2E?= Message-ID: http://hg.python.org/cpython/rev/a425408f1e52 changeset: 71674:a425408f1e52 branch: 2.7 user: ?ric Araujo date: Sun Jul 31 17:59:28 2011 +0200 summary: Stop trying to write into the stdlib during lib2to3 tests (#12331). 
This prevents tests from failing when run from a Python installed in a read-only directory. files: Lib/lib2to3/tests/test_refactor.py | 18 +++++++++++------- Misc/NEWS | 3 +++ 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/Lib/lib2to3/tests/test_refactor.py b/Lib/lib2to3/tests/test_refactor.py --- a/Lib/lib2to3/tests/test_refactor.py +++ b/Lib/lib2to3/tests/test_refactor.py @@ -177,22 +177,26 @@ self.assertEqual(results, expected) def check_file_refactoring(self, test_file, fixers=_2TO3_FIXERS): + tmpdir = tempfile.mkdtemp(prefix="2to3-test_refactor") + self.addCleanup(shutil.rmtree, tmpdir) + # make a copy of the tested file that we can write to + shutil.copy(test_file, tmpdir) + test_file = os.path.join(tmpdir, os.path.basename(test_file)) + os.chmod(test_file, 0o644) + def read_file(): with open(test_file, "rb") as fp: return fp.read() + old_contents = read_file() rt = self.rt(fixers=fixers) rt.refactor_file(test_file) self.assertEqual(old_contents, read_file()) - try: - rt.refactor_file(test_file, True) - new_contents = read_file() - self.assertNotEqual(old_contents, new_contents) - finally: - with open(test_file, "wb") as fp: - fp.write(old_contents) + rt.refactor_file(test_file, True) + new_contents = read_file() + self.assertNotEqual(old_contents, new_contents) return new_contents def test_refactor_file(self): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -161,6 +161,9 @@ Tests ----- +- Issue #12331: The test suite for lib2to3 can now run from an installed + Python. + - Issue #12549: Correct test_platform to not fail when OS X returns 'x86_64' as the processor type on some Mac systems. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 14:49:59 2011 From: python-checkins at python.org (eric.araujo) Date: Mon, 01 Aug 2011 14:49:59 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAobWVyZ2UgMi43IC0+IDIuNyk6?= =?utf8?q?_Branch_merge?= Message-ID: http://hg.python.org/cpython/rev/8065f927b4f6 changeset: 71675:8065f927b4f6 branch: 2.7 parent: 71660:42f40f53fd73 parent: 71674:a425408f1e52 user: ?ric Araujo date: Mon Aug 01 14:48:19 2011 +0200 summary: Branch merge files: Doc/distutils/sourcedist.rst | 29 ++++++---- Lib/distutils/command/sdist.py | 48 ++++++++++------- Lib/distutils/tests/test_sdist.py | 43 ++++++++++++--- Lib/lib2to3/tests/test_refactor.py | 18 ++++-- Misc/ACKS | 2 + Misc/NEWS | 6 ++ Tools/scripts/patchcheck.py | 20 ++++-- 7 files changed, 112 insertions(+), 54 deletions(-) diff --git a/Doc/distutils/sourcedist.rst b/Doc/distutils/sourcedist.rst --- a/Doc/distutils/sourcedist.rst +++ b/Doc/distutils/sourcedist.rst @@ -111,12 +111,22 @@ :file:`MANIFEST`, you must specify everything: the default set of files described above does not apply in this case. -.. versionadded:: 2.7 +.. versionchanged:: 2.7 + An existing generated :file:`MANIFEST` will be regenerated without + :command:`sdist` comparing its modification time to the one of + :file:`MANIFEST.in` or :file:`setup.py`. + +.. versionchanged:: 2.7.1 :file:`MANIFEST` files start with a comment indicating they are generated. Files without this comment are not overwritten or removed. +.. versionchanged:: 2.7.3 + :command:`sdist` will read a :file:`MANIFEST` file if no :file:`MANIFEST.in` + exists, like it did before 2.7. + See :ref:`manifest_template` section for a syntax reference. + .. 
_manifest-options: Manifest-related options @@ -124,16 +134,16 @@ The normal course of operations for the :command:`sdist` command is as follows: -* if the manifest file, :file:`MANIFEST` doesn't exist, read :file:`MANIFEST.in` - and create the manifest +* if the manifest file (:file:`MANIFEST` by default) exists and the first line + does not have a comment indicating it is generated from :file:`MANIFEST.in`, + then it is used as is, unaltered + +* if the manifest file doesn't exist or has been previously automatically + generated, read :file:`MANIFEST.in` and create the manifest * if neither :file:`MANIFEST` nor :file:`MANIFEST.in` exist, create a manifest with just the default file set -* if either :file:`MANIFEST.in` or the setup script (:file:`setup.py`) are more - recent than :file:`MANIFEST`, recreate :file:`MANIFEST` by reading - :file:`MANIFEST.in` - * use the list of files now in :file:`MANIFEST` (either just generated or read in) to create the source distribution archive(s) @@ -271,8 +281,3 @@ ``a-z``, ``a-zA-Z``, ``a-f0-9_.``). The definition of "regular filename character" is platform-specific: on Unix it is anything except slash; on Windows anything except backslash or colon. - -.. versionchanged:: 2.7 - An existing generated :file:`MANIFEST` will be regenerated without - :command:`sdist` comparing its modification time to the one of - :file:`MANIFEST.in` or :file:`setup.py`. diff --git a/Lib/distutils/command/sdist.py b/Lib/distutils/command/sdist.py --- a/Lib/distutils/command/sdist.py +++ b/Lib/distutils/command/sdist.py @@ -182,14 +182,20 @@ reading the manifest, or just using the default file set -- it all depends on the user's options. """ - # new behavior: + # new behavior when using a template: # the file list is recalculated everytime because # even if MANIFEST.in or setup.py are not changed # the user might have added some files in the tree that # need to be included. # - # This makes --force the default and only behavior. 
+ # This makes --force the default and only behavior with templates. template_exists = os.path.isfile(self.template) + if not template_exists and self._manifest_is_not_generated(): + self.read_manifest() + self.filelist.sort() + self.filelist.remove_duplicates() + return + if not template_exists: self.warn(("manifest template '%s' does not exist " + "(using default file list)") % @@ -352,23 +358,28 @@ by 'add_defaults()' and 'read_template()') to the manifest file named by 'self.manifest'. """ - if os.path.isfile(self.manifest): - fp = open(self.manifest) - try: - first_line = fp.readline() - finally: - fp.close() - - if first_line != '# file GENERATED by distutils, do NOT edit\n': - log.info("not writing to manually maintained " - "manifest file '%s'" % self.manifest) - return + if self._manifest_is_not_generated(): + log.info("not writing to manually maintained " + "manifest file '%s'" % self.manifest) + return content = self.filelist.files[:] content.insert(0, '# file GENERATED by distutils, do NOT edit') self.execute(file_util.write_file, (self.manifest, content), "writing manifest file '%s'" % self.manifest) + def _manifest_is_not_generated(self): + # check for special comment used in 2.7.1 and higher + if not os.path.isfile(self.manifest): + return False + + fp = open(self.manifest, 'rU') + try: + first_line = fp.readline() + finally: + fp.close() + return first_line != '# file GENERATED by distutils, do NOT edit\n' + def read_manifest(self): """Read the manifest file (named by 'self.manifest') and use it to fill in 'self.filelist', the list of files to include in the source @@ -376,12 +387,11 @@ """ log.info("reading manifest file '%s'", self.manifest) manifest = open(self.manifest) - while 1: - line = manifest.readline() - if line == '': # end of file - break - if line[-1] == '\n': - line = line[0:-1] + for line in manifest: + # ignore comments and blank lines + line = line.strip() + if line.startswith('#') or not line: + continue self.filelist.append(line) 
manifest.close() diff --git a/Lib/distutils/tests/test_sdist.py b/Lib/distutils/tests/test_sdist.py --- a/Lib/distutils/tests/test_sdist.py +++ b/Lib/distutils/tests/test_sdist.py @@ -1,9 +1,11 @@ """Tests for distutils.command.sdist.""" import os +import tarfile import unittest -import shutil +import warnings import zipfile -import tarfile +from os.path import join +from textwrap import dedent # zlib is not used here, but if it's not available # the tests that use zipfile may fail @@ -19,19 +21,13 @@ except ImportError: UID_GID_SUPPORT = False -from os.path import join -import sys -import tempfile -import warnings - from test.test_support import captured_stdout, check_warnings, run_unittest from distutils.command.sdist import sdist, show_formats from distutils.core import Distribution from distutils.tests.test_config import PyPIRCCommandTestCase -from distutils.errors import DistutilsExecError, DistutilsOptionError +from distutils.errors import DistutilsOptionError from distutils.spawn import find_executable -from distutils.tests import support from distutils.log import WARN from distutils.archive_util import ARCHIVE_FORMATS @@ -405,13 +401,33 @@ self.assertEqual(manifest[0], '# file GENERATED by distutils, do NOT edit') + @unittest.skipUnless(zlib, 'requires zlib') + def test_manifest_comments(self): + # make sure comments don't cause exceptions or wrong includes + contents = dedent("""\ + # bad.py + #bad.py + good.py + """) + dist, cmd = self.get_cmd() + cmd.ensure_finalized() + self.write_file((self.tmp_dir, cmd.manifest), contents) + self.write_file((self.tmp_dir, 'good.py'), '# pick me!') + self.write_file((self.tmp_dir, 'bad.py'), "# don't pick me!") + self.write_file((self.tmp_dir, '#bad.py'), "# don't pick me!") + cmd.run() + self.assertEqual(cmd.filelist.files, ['good.py']) + @unittest.skipUnless(zlib, "requires zlib") def test_manual_manifest(self): # check that a MANIFEST without a marker is left alone dist, cmd = self.get_cmd() cmd.ensure_finalized() 
self.write_file((self.tmp_dir, cmd.manifest), 'README.manual') + self.write_file((self.tmp_dir, 'README.manual'), + 'This project maintains its MANIFEST file itself.') cmd.run() + self.assertEqual(cmd.filelist.files, ['README.manual']) f = open(cmd.manifest) try: @@ -422,6 +438,15 @@ self.assertEqual(manifest, ['README.manual']) + archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') + archive = tarfile.open(archive_name) + try: + filenames = [tarinfo.name for tarinfo in archive] + finally: + archive.close() + self.assertEqual(sorted(filenames), ['fake-1.0', 'fake-1.0/PKG-INFO', + 'fake-1.0/README.manual']) + def test_suite(): return unittest.makeSuite(SDistTestCase) diff --git a/Lib/lib2to3/tests/test_refactor.py b/Lib/lib2to3/tests/test_refactor.py --- a/Lib/lib2to3/tests/test_refactor.py +++ b/Lib/lib2to3/tests/test_refactor.py @@ -177,22 +177,26 @@ self.assertEqual(results, expected) def check_file_refactoring(self, test_file, fixers=_2TO3_FIXERS): + tmpdir = tempfile.mkdtemp(prefix="2to3-test_refactor") + self.addCleanup(shutil.rmtree, tmpdir) + # make a copy of the tested file that we can write to + shutil.copy(test_file, tmpdir) + test_file = os.path.join(tmpdir, os.path.basename(test_file)) + os.chmod(test_file, 0o644) + def read_file(): with open(test_file, "rb") as fp: return fp.read() + old_contents = read_file() rt = self.rt(fixers=fixers) rt.refactor_file(test_file) self.assertEqual(old_contents, read_file()) - try: - rt.refactor_file(test_file, True) - new_contents = read_file() - self.assertNotEqual(old_contents, new_contents) - finally: - with open(test_file, "wb") as fp: - fp.write(old_contents) + rt.refactor_file(test_file, True) + new_contents = read_file() + self.assertNotEqual(old_contents, new_contents) return new_contents def test_refactor_file(self): diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -194,6 +194,7 @@ Vincent Delft Arnaud Delobelle Erik Demaine +John Dennis Roger Dev Raghuram Devarakonda Catherine 
Devlin @@ -813,6 +814,7 @@ Tobias Thelen James Thomas Robin Thomas +Stephen Thorne Eric Tiedemann Tracy Tims Oren Tirosh diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -37,6 +37,9 @@ Library ------- +- Issues #11104, #8688: Fix the behavior of distutils' sdist command with + manually-maintained MANIFEST files. + - Issue #8887: "pydoc somebuiltin.somemethod" (or help('somebuiltin.somemethod') in Python code) now finds the doc of the method. @@ -158,6 +161,9 @@ Tests ----- +- Issue #12331: The test suite for lib2to3 can now run from an installed + Python. + - Issue #12549: Correct test_platform to not fail when OS X returns 'x86_64' as the processor type on some Mac systems. diff --git a/Tools/scripts/patchcheck.py b/Tools/scripts/patchcheck.py --- a/Tools/scripts/patchcheck.py +++ b/Tools/scripts/patchcheck.py @@ -4,11 +4,15 @@ import shutil import os.path import subprocess +import sysconfig import reindent import untabify +SRCDIR = sysconfig.get_config_var('srcdir') + + def n_files_str(count): """Return 'N file(s)' with the proper plurality on 'file'.""" return "{} file{}".format(count, "s" if count != 1 else "") @@ -36,7 +40,7 @@ info=lambda x: n_files_str(len(x))) def changed_files(): """Get the list of changed or added files from the VCS.""" - if os.path.isdir('.hg'): + if os.path.isdir(os.path.join(SRCDIR, '.hg')): vcs = 'hg' cmd = 'hg status --added --modified --no-status' elif os.path.isdir('.svn'): @@ -75,7 +79,7 @@ reindent.makebackup = False # No need to create backups. 
fixed = [] for path in (x for x in file_paths if x.endswith('.py')): - if reindent.check(path): + if reindent.check(os.path.join(SRCDIR, path)): fixed.append(path) return fixed @@ -85,10 +89,11 @@ """Report if any C files """ fixed = [] for path in file_paths: - with open(path, 'r') as f: + abspath = os.path.join(SRCDIR, path) + with open(abspath, 'r') as f: if '\t' not in f.read(): continue - untabify.process(path, 8, verbose=False) + untabify.process(abspath, 8, verbose=False) fixed.append(path) return fixed @@ -99,13 +104,14 @@ def normalize_docs_whitespace(file_paths): fixed = [] for path in file_paths: + abspath = os.path.join(SRCDIR, path) try: - with open(path, 'rb') as f: + with open(abspath, 'rb') as f: lines = f.readlines() new_lines = [ws_re.sub(br'\1', line) for line in lines] if new_lines != lines: - shutil.copyfile(path, path + '.bak') - with open(path, 'wb') as f: + shutil.copyfile(abspath, abspath + '.bak') + with open(abspath, 'wb') as f: f.writelines(new_lines) fixed.append(path) except Exception as err: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 16:11:32 2011 From: python-checkins at python.org (eric.araujo) Date: Mon, 01 Aug 2011 16:11:32 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_bug_I_unwittingly_added?= =?utf8?q?_in_1521d9837d16_=28found_by_Ezio_Melotti=29?= Message-ID: http://hg.python.org/cpython/rev/4813fe208e29 changeset: 71676:4813fe208e29 parent: 71671:7e9bfd0395d1 user: ?ric Araujo date: Mon Aug 01 15:29:07 2011 +0200 summary: Fix bug I unwittingly added in 1521d9837d16 (found by Ezio Melotti) files: Lib/modulefinder.py | 3 +-- 1 files changed, 1 insertions(+), 2 deletions(-) diff --git a/Lib/modulefinder.py b/Lib/modulefinder.py --- a/Lib/modulefinder.py +++ b/Lib/modulefinder.py @@ -26,8 +26,7 @@ # A Public interface def AddPackagePath(packagename, path): - paths = packagePathMap.setdefault(packagename, []).append(path) - packagePathMap[packagename] = paths + 
packagePathMap.setdefault(packagename, []).append(path) replacePackageMap = {} -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 17:31:47 2011 From: python-checkins at python.org (eric.araujo) Date: Mon, 01 Aug 2011 17:31:47 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Fix_resource_wa?= =?utf8?q?rning_when_looking_at_turtledemo=E2=80=99s_help_=28=2312295=29?= Message-ID: http://hg.python.org/cpython/rev/63bd8f42b511 changeset: 71677:63bd8f42b511 branch: 3.2 parent: 71669:da79c5d0ed17 user: ?ric Araujo date: Mon Aug 01 17:29:36 2011 +0200 summary: Fix resource warning when looking at turtledemo?s help (#12295) files: Lib/idlelib/textView.py | 5 +++-- 1 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Lib/idlelib/textView.py b/Lib/idlelib/textView.py --- a/Lib/idlelib/textView.py +++ b/Lib/idlelib/textView.py @@ -62,14 +62,15 @@ def view_file(parent, title, filename, encoding=None): try: - textFile = open(filename, 'r', encoding=encoding) + with open(filename, 'r', encoding=encoding) as file: + contents = file.read() except IOError: import tkinter.messagebox as tkMessageBox tkMessageBox.showerror(title='File Load Error', message='Unable to load file %r .' 
% filename, parent=parent) else: - return view_text(parent, title, textFile.read()) + return view_text(parent, title, contents) if __name__ == '__main__': -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 17:31:48 2011 From: python-checkins at python.org (eric.araujo) Date: Mon, 01 Aug 2011 17:31:48 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_=2312295_fix_from_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/7af576e3cb0c changeset: 71678:7af576e3cb0c parent: 71676:4813fe208e29 parent: 71677:63bd8f42b511 user: ?ric Araujo date: Mon Aug 01 17:31:12 2011 +0200 summary: Merge #12295 fix from 3.2 files: Lib/idlelib/textView.py | 5 +++-- 1 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Lib/idlelib/textView.py b/Lib/idlelib/textView.py --- a/Lib/idlelib/textView.py +++ b/Lib/idlelib/textView.py @@ -62,14 +62,15 @@ def view_file(parent, title, filename, encoding=None): try: - textFile = open(filename, 'r', encoding=encoding) + with open(filename, 'r', encoding=encoding) as file: + contents = file.read() except IOError: import tkinter.messagebox as tkMessageBox tkMessageBox.showerror(title='File Load Error', message='Unable to load file %r .' 
% filename, parent=parent) else: - return view_text(parent, title, textFile.read()) + return view_text(parent, title, contents) if __name__ == '__main__': -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 22:59:21 2011 From: python-checkins at python.org (georg.brandl) Date: Mon, 01 Aug 2011 22:59:21 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Use_attribute_a?= =?utf8?q?ccess_instead_of_index_access_for_namedtuple=2E?= Message-ID: http://hg.python.org/cpython/rev/1f9ca1819d7c changeset: 71679:1f9ca1819d7c branch: 3.2 parent: 71677:63bd8f42b511 user: Georg Brandl date: Mon Aug 01 22:58:53 2011 +0200 summary: Use attribute access instead of index access for namedtuple. files: Doc/library/stat.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/stat.rst b/Doc/library/stat.rst --- a/Doc/library/stat.rst +++ b/Doc/library/stat.rst @@ -87,7 +87,7 @@ for f in os.listdir(top): pathname = os.path.join(top, f) - mode = os.stat(pathname)[ST_MODE] + mode = os.stat(pathname).st_mode if S_ISDIR(mode): # It's a directory, recurse into it walktree(pathname, callback) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 22:59:24 2011 From: python-checkins at python.org (georg.brandl) Date: Mon, 01 Aug 2011 22:59:24 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_with_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/283ff2609046 changeset: 71680:283ff2609046 parent: 71678:7af576e3cb0c parent: 71679:1f9ca1819d7c user: Georg Brandl date: Mon Aug 01 22:59:40 2011 +0200 summary: Merge with 3.2 files: Doc/library/stat.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/stat.rst b/Doc/library/stat.rst --- a/Doc/library/stat.rst +++ b/Doc/library/stat.rst @@ -87,7 +87,7 @@ for f in os.listdir(top): pathname = os.path.join(top, f) - mode = os.stat(pathname)[ST_MODE] 
+ mode = os.stat(pathname).st_mode if S_ISDIR(mode): # It's a directory, recurse into it walktree(pathname, callback) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 1 22:59:25 2011 From: python-checkins at python.org (georg.brandl) Date: Mon, 01 Aug 2011 22:59:25 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Use_attribute_a?= =?utf8?q?ccess_instead_of_index_access_for_namedtuple=2E?= Message-ID: http://hg.python.org/cpython/rev/5acd6ca6c41a changeset: 71681:5acd6ca6c41a branch: 2.7 parent: 71675:8065f927b4f6 user: Georg Brandl date: Mon Aug 01 22:58:53 2011 +0200 summary: Use attribute access instead of index access for namedtuple. files: Doc/library/stat.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/stat.rst b/Doc/library/stat.rst --- a/Doc/library/stat.rst +++ b/Doc/library/stat.rst @@ -84,7 +84,7 @@ for f in os.listdir(top): pathname = os.path.join(top, f) - mode = os.stat(pathname)[ST_MODE] + mode = os.stat(pathname).st_mode if S_ISDIR(mode): # It's a directory, recurse into it walktree(pathname, callback) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 2 00:16:42 2011 From: python-checkins at python.org (nadeem.vawda) Date: Tue, 02 Aug 2011 00:16:42 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2311651=3A_Move_opti?= =?utf8?q?ons_for_running_tests_into_a_Python_script=2E?= Message-ID: http://hg.python.org/cpython/rev/c68a80779434 changeset: 71682:c68a80779434 parent: 71680:283ff2609046 user: Nadeem Vawda date: Mon Aug 01 23:48:26 2011 +0200 summary: Issue #11651: Move options for running tests into a Python script. This will be particularly useful to Windows users. run_tests.py originally written by Brett Cannon. 
files: Lib/test/regrtest.py | 7 ++++ Makefile.pre.in | 32 +++++++++--------- Misc/NEWS | 7 ++++ Tools/scripts/run_tests.py | 45 ++++++++++++++++++++++++++ 4 files changed, 75 insertions(+), 16 deletions(-) diff --git a/Lib/test/regrtest.py b/Lib/test/regrtest.py --- a/Lib/test/regrtest.py +++ b/Lib/test/regrtest.py @@ -133,6 +133,8 @@ all - Enable all special resources. + none - Disable all special resources (this is the default). + audio - Tests that use the audio device. (There are known cases of broken audio drivers that can crash Python or even the Linux kernel.) @@ -387,6 +389,9 @@ if r == 'all': use_resources[:] = RESOURCE_NAMES continue + if r == 'none': + del use_resources[:] + continue remove = False if r[0] == '-': remove = True @@ -424,6 +429,8 @@ use_mp = 2 + multiprocessing.cpu_count() except (ImportError, NotImplementedError): use_mp = 3 + if use_mp == 1: + use_mp = None elif o == '--header': header = True elif o == '--slaveargs': diff --git a/Makefile.pre.in b/Makefile.pre.in --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -747,14 +747,15 @@ ###################################################################### +TESTOPTS= $(EXTRATESTOPTS) +TESTPYTHON= $(RUNSHARED) ./$(BUILDPYTHON) $(TESTPYTHONOPTS) +TESTRUNNER= $(TESTPYTHON) $(srcdir)/Tools/scripts/run_tests.py +TESTTIMEOUT= 3600 + # Run a basic set of regression tests. # This excludes some tests that are particularly resource-intensive. -TESTOPTS= $(EXTRATESTOPTS) -TESTPROG= $(srcdir)/Lib/test/regrtest.py -TESTPYTHON= $(RUNSHARED) ./$(BUILDPYTHON) -Wd -E -bb $(TESTPYTHONOPTS) -TESTTIMEOUT= 3600 test: all platform - $(TESTPYTHON) $(TESTPROG) -j0 $(TESTOPTS) + $(TESTRUNNER) $(TESTOPTS) # Run the full test suite twice - once without .pyc files, and once with. # In the past, we've had problems where bugs in the marshalling or @@ -765,10 +766,10 @@ # sample data. 
testall: all platform -find $(srcdir)/Lib -name '*.py[co]' -print | xargs rm -f - $(TESTPYTHON) $(srcdir)/Lib/compileall.py + $(TESTPYTHON) -E $(srcdir)/Lib/compileall.py -find $(srcdir)/Lib -name '*.py[co]' -print | xargs rm -f - -$(TESTPYTHON) $(TESTPROG) -j0 -uall $(TESTOPTS) - $(TESTPYTHON) $(TESTPROG) -j0 -uall $(TESTOPTS) + -$(TESTRUNNER) -u all $(TESTOPTS) + $(TESTRUNNER) -u all $(TESTOPTS) # Run the test suite for both architectures in a Universal build on OSX. # Must be run on an Intel box. @@ -777,25 +778,24 @@ echo "This can only be used on OSX/i386" ;\ exit 1 ;\ fi - $(TESTPYTHON) $(TESTPROG) -j0 -uall $(TESTOPTS) - $(RUNSHARED) /usr/libexec/oah/translate ./$(BUILDPYTHON) -E $(TESTPROG) -j0 -uall $(TESTOPTS) + $(TESTRUNNER) -u all $(TESTOPTS) + $(RUNSHARED) /usr/libexec/oah/translate \ + ./$(BUILDPYTHON) -E -m test -j 0 -u all $(TESTOPTS) -# Like testall, but with only one pass. +# Like testall, but with only one pass and without multiple processes. # Run an optional script to include information about the build environment. 
buildbottest: all platform - at if which pybuildbot.identify >/dev/null 2>&1; then \ pybuildbot.identify "CC='$(CC)'" "CXX='$(CXX)'"; \ fi - $(TESTPYTHON) $(TESTPROG) -uall -rwW --timeout=$(TESTTIMEOUT) $(TESTOPTS) + $(TESTRUNNER) -j 1 -u all -W --timeout=$(TESTTIMEOUT) $(TESTOPTS) QUICKTESTOPTS= $(TESTOPTS) -x test_subprocess test_io test_lib2to3 \ test_multibytecodec test_urllib2_localnet test_itertools \ test_multiprocessing test_mailbox test_socket test_poll \ - test_select test_zipfile + test_select test_zipfile test_concurrent_futures quicktest: all platform - -find $(srcdir)/Lib -name '*.py[co]' -print | xargs rm -f - -$(TESTPYTHON) $(TESTPROG) -j0 $(QUICKTESTOPTS) - $(TESTPYTHON) $(TESTPROG) -j0 $(QUICKTESTOPTS) + $(TESTRUNNER) $(QUICKTESTOPTS) install: altinstall bininstall diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -1147,6 +1147,13 @@ Tests ----- +- Issue #11651: Improve the Makefile test targets to run more of the test suite + more quickly. The --multiprocess option is now enabled by default, reducing + the amount of time needed to run the tests. "make test" and "make quicktest" + now include some resource-intensive tests, but no longer run the test suite + twice to check for bugs in .pyc generation. Tools/scripts/run_test.py provides + as an easy platform-independent way to run test suite with sensible defaults. + - Issue #12331: The test suite for the packaging module can now run from an installed Python. diff --git a/Tools/scripts/run_tests.py b/Tools/scripts/run_tests.py new file mode 100755 --- /dev/null +++ b/Tools/scripts/run_tests.py @@ -0,0 +1,45 @@ +"""Run Python's test suite in a fast, rigorous way. + +The defaults are meant to be thorough but to skip certain resources are not +used (by default) which can consume a lot of time and resources (e.g., +largefile) or can be distracting (e.g., audio and gui). These defaults +can be overridden by simply passing a -u option to this script. 
+ +""" + +import os +import sys +import test.support + + +def is_multiprocess_flag(arg): + return arg.startswith('-j') or arg.startswith('--multiprocess') + + +def is_resource_use_flag(arg): + return arg.startswith('-u') or arg.startswith('--use') + + +def main(regrtest_args): + args = [sys.executable, + '-W', 'default', # Warnings set to 'default' + '-bb', # Warnings about bytes/bytearray + '-E', # Ignore environment variables + ] + # Allow user-specified interpreter options to override our defaults. + args.extend(test.support.args_from_interpreter_flags()) + args.extend(['-m', 'test', # Run the test suite + '-r', # Randomize test order + '-w', # Re-run failed tests in verbose mode + ]) + if not any(is_multiprocess_flag(arg) for arg in regrtest_args): + args.extend(['-j', '0']) # Use all CPU cores + if not any(is_resource_use_flag(arg) for arg in regrtest_args): + args.extend(['-u', 'all,-largefile,-audio,-gui']) + args.extend(regrtest_args) + print(' '.join(args)) + os.execv(sys.executable, args) + + +if __name__ == '__main__': + main(sys.argv[1:]) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 2 01:22:03 2011 From: python-checkins at python.org (stefan.krah) Date: Tue, 02 Aug 2011 01:22:03 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Skip_test=5Fget?= =?utf8?q?setlocale=5Fissue1813=28=29_on_Fedora_due_to_setlocale=28=29_bug?= =?utf8?q?=2E?= Message-ID: http://hg.python.org/cpython/rev/68b5f87566fb changeset: 71683:68b5f87566fb branch: 3.2 parent: 71679:1f9ca1819d7c user: Stefan Krah date: Tue Aug 02 01:06:16 2011 +0200 summary: Skip test_getsetlocale_issue1813() on Fedora due to setlocale() bug. 
See: https://bugzilla.redhat.com/show_bug.cgi?id=726536 files: Lib/test/test_locale.py | 3 +++ 1 files changed, 3 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_locale.py b/Lib/test/test_locale.py --- a/Lib/test/test_locale.py +++ b/Lib/test/test_locale.py @@ -1,4 +1,5 @@ from test.support import run_unittest, verbose +from platform import linux_distribution import unittest import locale import sys @@ -391,6 +392,8 @@ # crasher from bug #7419 self.assertRaises(locale.Error, locale.setlocale, 12345) + @unittest.skipIf(linux_distribution()[0] == 'Fedora', "Fedora setlocale() " + "bug: https://bugzilla.redhat.com/show_bug.cgi?id=726536") def test_getsetlocale_issue1813(self): # Issue #1813: setting and getting the locale under a Turkish locale oldlocale = locale.setlocale(locale.LC_CTYPE) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 2 01:22:03 2011 From: python-checkins at python.org (stefan.krah) Date: Tue, 02 Aug 2011 01:22:03 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_68b5f87566fb?= Message-ID: http://hg.python.org/cpython/rev/d168f439a14f changeset: 71684:d168f439a14f parent: 71682:c68a80779434 parent: 71683:68b5f87566fb user: Stefan Krah date: Tue Aug 02 01:12:16 2011 +0200 summary: Merge 68b5f87566fb files: Lib/test/test_locale.py | 3 +++ 1 files changed, 3 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_locale.py b/Lib/test/test_locale.py --- a/Lib/test/test_locale.py +++ b/Lib/test/test_locale.py @@ -1,4 +1,5 @@ from test.support import run_unittest, verbose +from platform import linux_distribution import unittest import locale import sys @@ -391,6 +392,8 @@ # crasher from bug #7419 self.assertRaises(locale.Error, locale.setlocale, 12345) + @unittest.skipIf(linux_distribution()[0] == 'Fedora', "Fedora setlocale() " + "bug: https://bugzilla.redhat.com/show_bug.cgi?id=726536") def test_getsetlocale_issue1813(self): # Issue #1813: setting 
and getting the locale under a Turkish locale oldlocale = locale.setlocale(locale.LC_CTYPE) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 2 01:22:04 2011 From: python-checkins at python.org (stefan.krah) Date: Tue, 02 Aug 2011 01:22:04 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Backport_68b5f8?= =?utf8?q?7566fb?= Message-ID: http://hg.python.org/cpython/rev/e8634ccd0c38 changeset: 71685:e8634ccd0c38 branch: 2.7 parent: 71681:5acd6ca6c41a user: Stefan Krah date: Tue Aug 02 01:17:48 2011 +0200 summary: Backport 68b5f87566fb files: Lib/test/test_locale.py | 3 +++ 1 files changed, 3 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_locale.py b/Lib/test/test_locale.py --- a/Lib/test/test_locale.py +++ b/Lib/test/test_locale.py @@ -1,4 +1,5 @@ from test.test_support import run_unittest, verbose +from platform import linux_distribution import unittest import locale import sys @@ -396,6 +397,8 @@ # crasher from bug #7419 self.assertRaises(locale.Error, locale.setlocale, 12345) + @unittest.skipIf(linux_distribution()[0] == 'Fedora', "Fedora setlocale() " + "bug: https://bugzilla.redhat.com/show_bug.cgi?id=726536") def test_getsetlocale_issue1813(self): # Issue #1813: setting and getting the locale under a Turkish locale oldlocale = locale.getlocale() -- Repository URL: http://hg.python.org/cpython From tjreedy at udel.edu Tue Aug 2 00:46:11 2011 From: tjreedy at udel.edu (Terry Reedy) Date: Mon, 01 Aug 2011 18:46:11 -0400 Subject: [Python-checkins] cpython: Issue #11651: Move options for running tests into a Python script. In-Reply-To: References: Message-ID: <4E372CB3.60408@udel.edu> On 8/1/2011 6:16 PM, nadeem.vawda wrote: > http://hg.python.org/cpython/rev/c68a80779434 > changeset: 71682:c68a80779434 > parent: 71680:283ff2609046 > user: Nadeem Vawda > date: Mon Aug 01 23:48:26 2011 +0200 > +++ b/Tools/scripts/run_tests.py > @@ -0,0 +1,45 @@ > +"""Run Python's test suite in a fast, rigorous way. 
> + > +The defaults are meant to be thorough but to skip certain resources are not > +used (by default) which can consume a lot of time and resources (e.g., > +largefile) or can be distracting (e.g., audio and gui). This sentence is a bit garbled. I believe you meant something more lie: "The defaults are meant to be thorough but to skip certain tests that can be either time or resource hogs (e.g., largefile) or distracting (e.g., audio and gui). Terry From rdmurray at bitdance.com Tue Aug 2 04:22:20 2011 From: rdmurray at bitdance.com (R. David Murray) Date: Mon, 01 Aug 2011 22:22:20 -0400 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Skip_test=5Fget?= =?utf8?q?setlocale=5Fissue1813=28=29_on_Fedora_due_to_setlocale=28?= =?utf8?b?KSBidWcu?= In-Reply-To: References: Message-ID: <20110802022221.9FE582506C6@webabinitio.net> On Tue, 02 Aug 2011 01:22:03 +0200, stefan.krah wrote: > http://hg.python.org/cpython/rev/68b5f87566fb > changeset: 71683:68b5f87566fb > branch: 3.2 > parent: 71679:1f9ca1819d7c > user: Stefan Krah > date: Tue Aug 02 01:06:16 2011 +0200 > summary: > Skip test_getsetlocale_issue1813() on Fedora due to setlocale() bug. 
> See: https://bugzilla.redhat.com/show_bug.cgi?id=726536 > > files: > Lib/test/test_locale.py | 3 +++ > 1 files changed, 3 insertions(+), 0 deletions(-) > > > diff --git a/Lib/test/test_locale.py b/Lib/test/test_locale.py > --- a/Lib/test/test_locale.py > +++ b/Lib/test/test_locale.py > @@ -1,4 +1,5 @@ > from test.support import run_unittest, verbose > +from platform import linux_distribution > import unittest > import locale > import sys > @@ -391,6 +392,8 @@ > # crasher from bug #7419 > self.assertRaises(locale.Error, locale.setlocale, 12345) > > + @unittest.skipIf(linux_distribution()[0] == 'Fedora', "Fedora setlocale() " > + "bug: https://bugzilla.redhat.com/show_bug.cgi?id=726536") > def test_getsetlocale_issue1813(self): > # Issue #1813: setting and getting the locale under a Turkish locale > oldlocale = locale.setlocale(locale.LC_CTYPE) Why 'Fedora'? This bug affects more than just Fedora: as I reported on the issue, I'm seeing it on Gentoo as well. (Also, including the issue number in the commit message is helpful). Note that since the bug report says that "Gentoo has been including this fix for two years", the fact that it is failing on my Gentoo system would seem to indicate that something about the fix is not right. So, I'm not sure this skip is even valid. I'm not sure we've finished diagnosing the bug. If there are any helpful tests I can run on Gentoo, please let me know. -- R. 
David Murray http://www.bitdance.com From solipsis at pitrou.net Tue Aug 2 05:26:31 2011 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Tue, 02 Aug 2011 05:26:31 +0200 Subject: [Python-checkins] Daily reference leaks (d168f439a14f): sum=0 Message-ID: results for d168f439a14f on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogIXwQIX', '-x'] From python-checkins at python.org Tue Aug 2 05:27:26 2011 From: python-checkins at python.org (eli.bendersky) Date: Tue, 02 Aug 2011 05:27:26 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2311049=3A_fix_test?= =?utf8?q?=5Fforget_to_work_on_installed_Python=2C_by_using_a_temporary?= Message-ID: http://hg.python.org/cpython/rev/1a978892a105 changeset: 71686:1a978892a105 parent: 71684:d168f439a14f user: Eli Bendersky date: Tue Aug 02 06:24:31 2011 +0300 summary: Issue #11049: fix test_forget to work on installed Python, by using a temporary module for import/forget files: Lib/test/test_support.py | 14 +++++++++++--- 1 files changed, 11 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_support.py b/Lib/test/test_support.py --- a/Lib/test/test_support.py +++ b/Lib/test/test_support.py @@ -55,9 +55,17 @@ support.rmtree(TESTDIRN) def test_forget(self): - import smtplib - support.forget("smtplib") - self.assertNotIn("smtplib", sys.modules) + mod_filename = TESTFN + '.py' + with open(mod_filename, 'w') as f: + print('foo = 1', file=f) + try: + mod = __import__(TESTFN) + self.assertIn(TESTFN, sys.modules) + + support.forget(TESTFN) + self.assertNotIn(TESTFN, sys.modules) + finally: + support.unlink(mod_filename) def test_HOST(self): s = socket.socket() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 2 10:16:54 2011 From: python-checkins at python.org (nadeem.vawda) Date: Tue, 02 Aug 2011 10:16:54 +0200 Subject: [Python-checkins] 
=?utf8?q?cpython=3A_Clean_up_description_of_Too?= =?utf8?q?ls/scripts/run=5Ftests=2Epy=2E?= Message-ID: http://hg.python.org/cpython/rev/0b52b6f1bfab changeset: 71687:0b52b6f1bfab user: Nadeem Vawda date: Tue Aug 02 10:16:45 2011 +0200 summary: Clean up description of Tools/scripts/run_tests.py. Also, add an entry for the script in Tools/scripts/README. files: Tools/scripts/README | 3 ++- Tools/scripts/run_tests.py | 8 ++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/Tools/scripts/README b/Tools/scripts/README --- a/Tools/scripts/README +++ b/Tools/scripts/README @@ -15,7 +15,7 @@ diff.py Print file diffs in context, unified, or ndiff formats dutree.py Format du(1) output as a tree sorted by size eptags.py Create Emacs TAGS file for Python modules -find_recursionlimit.py Find the maximum recursion limit on this machine +find_recursionlimit.py Find the maximum recursion limit on this machine finddiv.py A grep-like tool that looks for division operators findlinksto.py Recursively find symbolic links to a given path prefix findnocoding.py Find source files which need an encoding declaration @@ -53,6 +53,7 @@ reindent.py Change .py files to use 4-space indents reindent-rst.py Fix-up reStructuredText file whitespace rgrep.py Reverse grep through a file (useful for big logfiles) +run_tests.py Run the test suite with more sensible default options serve.py Small wsgiref-based web server, used in make serve in Doc suff.py Sort a list of files by suffix svneol.py Set svn:eol-style on all files in directory diff --git a/Tools/scripts/run_tests.py b/Tools/scripts/run_tests.py --- a/Tools/scripts/run_tests.py +++ b/Tools/scripts/run_tests.py @@ -1,9 +1,9 @@ """Run Python's test suite in a fast, rigorous way. -The defaults are meant to be thorough but to skip certain resources are not -used (by default) which can consume a lot of time and resources (e.g., -largefile) or can be distracting (e.g., audio and gui). 
These defaults -can be overridden by simply passing a -u option to this script. +The defaults are meant to be reasonably thorough, while skipping certain +tests that can be time-consuming or resource-intensive (e.g. largefile), +or distracting (e.g. audio and gui). These defaults can be overridden by +simply passing a -u option to this script. """ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 2 12:33:55 2011 From: python-checkins at python.org (senthil.kumaran) Date: Tue, 02 Aug 2011 12:33:55 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Fix_closes_Issu?= =?utf8?q?e12676_-_Invalid_identifier_used_in_TypeError_message_in?= Message-ID: http://hg.python.org/cpython/rev/1013c9fbd83c changeset: 71688:1013c9fbd83c branch: 3.2 parent: 71683:68b5f87566fb user: Senthil Kumaran date: Tue Aug 02 18:33:41 2011 +0800 summary: Fix closes Issue12676 - Invalid identifier used in TypeError message in http.client. Reported by Popa Claudiu and Patch by Santoso Wijaya. files: Lib/http/client.py | 2 +- Lib/test/test_httplib.py | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletions(-) diff --git a/Lib/http/client.py b/Lib/http/client.py --- a/Lib/http/client.py +++ b/Lib/http/client.py @@ -778,7 +778,7 @@ self.sock.sendall(d) else: raise TypeError("data should be a bytes-like object\ - or an iterable, got %r " % type(it)) + or an iterable, got %r " % type(data)) def _output(self, s): """Add a line of output to the current request buffer. 
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py --- a/Lib/test/test_httplib.py +++ b/Lib/test/test_httplib.py @@ -246,6 +246,13 @@ conn.request('GET', '/foo', body(), {'Content-Length': '11'}) self.assertEqual(sock.data, expected) + def test_send_type_error(self): + # See: Issue #12676 + conn = client.HTTPConnection('example.com') + conn.sock = FakeSocket('') + with self.assertRaises(TypeError): + conn.request('POST', 'test', conn) + def test_chunked(self): chunked_start = ( 'HTTP/1.1 200 OK\r\n' -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 2 12:35:07 2011 From: python-checkins at python.org (senthil.kumaran) Date: Tue, 02 Aug 2011 12:35:07 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Fix_closes_Issue12676_-_Invalid_identifier_used_in_TypeError?= =?utf8?q?_message_in?= Message-ID: http://hg.python.org/cpython/rev/c099ba0a278e changeset: 71689:c099ba0a278e parent: 71687:0b52b6f1bfab parent: 71688:1013c9fbd83c user: Senthil Kumaran date: Tue Aug 02 18:34:53 2011 +0800 summary: Fix closes Issue12676 - Invalid identifier used in TypeError message in http.client. Reported by Popa Claudiu and Patch by Santoso Wijaya. files: Lib/http/client.py | 2 +- Lib/test/test_httplib.py | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletions(-) diff --git a/Lib/http/client.py b/Lib/http/client.py --- a/Lib/http/client.py +++ b/Lib/http/client.py @@ -778,7 +778,7 @@ self.sock.sendall(d) else: raise TypeError("data should be a bytes-like object\ - or an iterable, got %r " % type(it)) + or an iterable, got %r " % type(data)) def _output(self, s): """Add a line of output to the current request buffer. 
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py --- a/Lib/test/test_httplib.py +++ b/Lib/test/test_httplib.py @@ -246,6 +246,13 @@ conn.request('GET', '/foo', body(), {'Content-Length': '11'}) self.assertEqual(sock.data, expected) + def test_send_type_error(self): + # See: Issue #12676 + conn = client.HTTPConnection('example.com') + conn.sock = FakeSocket('') + with self.assertRaises(TypeError): + conn.request('POST', 'test', conn) + def test_chunked(self): chunked_start = ( 'HTTP/1.1 200 OK\r\n' -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 2 12:53:21 2011 From: python-checkins at python.org (senthil.kumaran) Date: Tue, 02 Aug 2011 12:53:21 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Fix_closes_Issu?= =?utf8?q?e12183_-_Explain_the_Symlink_copy_behavior_in_shutil=2Ecopytree?= =?utf8?q?=2E?= Message-ID: http://hg.python.org/cpython/rev/209ad8920b03 changeset: 71690:209ad8920b03 branch: 2.7 parent: 71685:e8634ccd0c38 user: Senthil Kumaran date: Tue Aug 02 18:50:44 2011 +0800 summary: Fix closes Issue12183 - Explain the Symlink copy behavior in shutil.copytree. Patch by Petri Lehtinen. files: Doc/library/shutil.rst | 5 +++-- 1 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -105,8 +105,9 @@ :func:`copy2`. If *symlinks* is true, symbolic links in the source tree are represented as - symbolic links in the new tree; if false or omitted, the contents of the - linked files are copied to the new tree. + symbolic links in the new tree, but the metadata of the original links is NOT + copied; if false or omitted, the contents and metadata of the linked files + are copied to the new tree. 
If *ignore* is given, it must be a callable that will receive as its arguments the directory being visited by :func:`copytree`, and a list of its -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 2 12:55:22 2011 From: python-checkins at python.org (senthil.kumaran) Date: Tue, 02 Aug 2011 12:55:22 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Fix_closes_Issu?= =?utf8?q?e12183_-_Explain_the_Symlink_copy_behavior_in_shutil=2Ecopytree?= =?utf8?q?=2E?= Message-ID: http://hg.python.org/cpython/rev/299992730c87 changeset: 71691:299992730c87 branch: 3.2 parent: 71688:1013c9fbd83c user: Senthil Kumaran date: Tue Aug 02 18:52:28 2011 +0800 summary: Fix closes Issue12183 - Explain the Symlink copy behavior in shutil.copytree. Patch by Petri Lehtinen. files: Doc/library/shutil.rst | 5 +++-- 1 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -101,8 +101,9 @@ :func:`copy2`. If *symlinks* is true, symbolic links in the source tree are represented as - symbolic links in the new tree; if false or omitted, the contents of the - linked files are copied to the new tree. + symbolic links in the new tree, but the metadata of the original links is NOT + copied; if false or omitted, the contents and metadata of the linked files + are copied to the new tree. 
When *symlinks* is false, if the file pointed by the symlink doesn't exist, a exception will be added in the list of errors raised in -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 2 14:20:52 2011 From: python-checkins at python.org (jason.coombs) Date: Tue, 02 Aug 2011 14:20:52 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Corrected_attribute_docstri?= =?utf8?q?ng_per_pep-257_=28reference_=2310639=29?= Message-ID: http://hg.python.org/cpython/rev/dc96af0e7f60 changeset: 71692:dc96af0e7f60 parent: 71689:c099ba0a278e user: Jason R. Coombs date: Tue Aug 02 08:19:31 2011 -0400 summary: Corrected attribute docstring per pep-257 (reference #10639) files: Tools/scripts/reindent.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Tools/scripts/reindent.py b/Tools/scripts/reindent.py --- a/Tools/scripts/reindent.py +++ b/Tools/scripts/reindent.py @@ -52,8 +52,8 @@ recurse = False dryrun = False makebackup = True -"A specified newline to be used in the output (set by --newline option)" spec_newline = None +"""A specified newline to be used in the output (set by --newline option)""" def usage(msg=None): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 2 14:58:38 2011 From: python-checkins at python.org (antoine.pitrou) Date: Tue, 02 Aug 2011 14:58:38 +0200 Subject: [Python-checkins] =?utf8?q?devguide=3A_Add_Sandro_to_the_list_of_?= =?utf8?q?core_developers?= Message-ID: http://hg.python.org/devguide/rev/2783106b0ccc changeset: 438:2783106b0ccc user: Antoine Pitrou date: Tue Aug 02 14:56:56 2011 +0200 summary: Add Sandro to the list of core developers files: developers.rst | 4 ++++ 1 files changed, 4 insertions(+), 0 deletions(-) diff --git a/developers.rst b/developers.rst --- a/developers.rst +++ b/developers.rst @@ -24,6 +24,10 @@ Permissions History ------------------- +- Sandro Tosi was given push privileges on Aug 1 2011 by Antoine Pitrou, + for documentation 
and other contributions, on recommendation by Ezio + Melotti, R. David Murray and others. + + - Charles-François Natali was given push privileges on May 19 2011 by Antoine Pitrou, for general contributions, on recommandation by Victor Stinner, Brian Curtin and others. -- Repository URL: http://hg.python.org/devguide From python-checkins at python.org Tue Aug 2 16:42:39 2011 From: python-checkins at python.org (ezio.melotti) Date: Tue, 02 Aug 2011 16:42:39 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_=2312183=3A_merge_with_3=2E2=2E?= Message-ID: http://hg.python.org/cpython/rev/746dc0a2398e changeset: 71693:746dc0a2398e parent: 71692:dc96af0e7f60 parent: 71691:299992730c87 user: Ezio Melotti date: Tue Aug 02 17:42:24 2011 +0300 summary: #12183: merge with 3.2. files: Doc/library/shutil.rst | 5 +++-- 1 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -101,8 +101,9 @@ :func:`copy2`. If *symlinks* is true, symbolic links in the source tree are represented as - symbolic links in the new tree; if false or omitted, the contents of the - linked files are copied to the new tree. + symbolic links in the new tree, but the metadata of the original links is NOT + copied; if false or omitted, the contents and metadata of the linked files + are copied to the new tree. 
When *symlinks* is false, if the file pointed by the symlink doesn't exist, a exception will be added in the list of errors raised in -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 2 16:47:56 2011 From: python-checkins at python.org (sandro.tosi) Date: Tue, 02 Aug 2011 16:47:56 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogIzEyNjcwOiBGaXgg?= =?utf8?q?struct_code_after_forward_declaration_on_ctypes_doc?= Message-ID: http://hg.python.org/cpython/rev/2aa8dd4df650 changeset: 71694:2aa8dd4df650 branch: 2.7 parent: 71690:209ad8920b03 user: Sandro Tosi date: Tue Aug 02 16:16:11 2011 +0200 summary: #12670: Fix struct code after forward declaration on ctypes doc files: Doc/library/ctypes.rst | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst --- a/Doc/library/ctypes.rst +++ b/Doc/library/ctypes.rst @@ -869,10 +869,10 @@ struct cell; /* forward declaration */ - struct { + struct cell { char *name; struct cell *next; - } cell; + }; The straightforward translation into ctypes code would be this, but it does not work:: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 2 16:47:57 2011 From: python-checkins at python.org (sandro.tosi) Date: Tue, 02 Aug 2011 16:47:57 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogIzEyNjcwOiBGaXgg?= =?utf8?q?struct_code_after_forward_declaration_on_ctypes_doc?= Message-ID: http://hg.python.org/cpython/rev/25dd1d3f4b88 changeset: 71695:25dd1d3f4b88 branch: 3.2 parent: 71691:299992730c87 user: Sandro Tosi date: Tue Aug 02 16:17:14 2011 +0200 summary: #12670: Fix struct code after forward declaration on ctypes doc files: Doc/library/ctypes.rst | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst --- a/Doc/library/ctypes.rst +++ b/Doc/library/ctypes.rst @@ -865,10 +865,10 @@ struct cell; /* forward 
declaration */ - struct { + struct cell { char *name; struct cell *next; - } cell; + }; The straightforward translation into ctypes code would be this, but it does not work:: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 2 16:47:58 2011 From: python-checkins at python.org (sandro.tosi) Date: Tue, 02 Aug 2011 16:47:58 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_=2312670=3A_merge_with_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/01192d30365a changeset: 71696:01192d30365a parent: 71693:746dc0a2398e parent: 71695:25dd1d3f4b88 user: Sandro Tosi date: Tue Aug 02 16:44:31 2011 +0200 summary: #12670: merge with 3.2 files: Doc/library/ctypes.rst | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst --- a/Doc/library/ctypes.rst +++ b/Doc/library/ctypes.rst @@ -865,10 +865,10 @@ struct cell; /* forward declaration */ - struct { + struct cell { char *name; struct cell *next; - } cell; + }; The straightforward translation into ctypes code would be this, but it does not work:: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 2 18:44:55 2011 From: python-checkins at python.org (sandro.tosi) Date: Tue, 02 Aug 2011 18:44:55 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogIzEyNjY1OiBEaWN0?= =?utf8?q?ionary_view_example_has_error_in_set_operation?= Message-ID: http://hg.python.org/cpython/rev/a70cdec027e7 changeset: 71697:a70cdec027e7 branch: 3.2 parent: 71695:25dd1d3f4b88 user: Sandro Tosi date: Tue Aug 02 18:42:04 2011 +0200 summary: #12665: Dictionary view example has error in set operation files: Doc/library/stdtypes.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -2318,7 +2318,7 @@ >>> keys & {'eggs', 'bacon', 
'salad'} {'bacon'} >>> keys ^ {'sausage', 'juice'} - {'juice', 'eggs', 'bacon', 'spam'} + {'juice', 'sausage', 'bacon', 'spam'} .. _typememoryview: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 2 18:45:05 2011 From: python-checkins at python.org (sandro.tosi) Date: Tue, 02 Aug 2011 18:45:05 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_=2312665=3A_merge_with_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/b2dc821058fe changeset: 71698:b2dc821058fe parent: 71696:01192d30365a parent: 71697:a70cdec027e7 user: Sandro Tosi date: Tue Aug 02 18:43:49 2011 +0200 summary: #12665: merge with 3.2 files: Doc/library/stdtypes.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -2329,7 +2329,7 @@ >>> keys & {'eggs', 'bacon', 'salad'} {'bacon'} >>> keys ^ {'sausage', 'juice'} - {'juice', 'eggs', 'bacon', 'spam'} + {'juice', 'sausage', 'bacon', 'spam'} .. _typememoryview: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 2 20:02:42 2011 From: python-checkins at python.org (eric.araujo) Date: Tue, 02 Aug 2011 20:02:42 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Fix_incorrect_m?= =?utf8?q?time_comparison_in_distutils_=28=2311933=29=2E?= Message-ID: http://hg.python.org/cpython/rev/aebe3243bb2c changeset: 71699:aebe3243bb2c branch: 2.7 parent: 71675:8065f927b4f6 user: Éric Araujo date: Tue Aug 02 03:16:12 2011 +0200 summary: Fix incorrect mtime comparison in distutils (#11933). This is a regression introduced in 9211a5d7d0b4, when uses of ST_MTIME constants were changed to uses of st_mtime attributes.
As diagnosed in the bug report, this change is not merely stylistic: st_mtime is a float but ST_MTIME's resolution is rounded to the seconds, so there was a mismatch between the values seen by file_util and dep_util which caused an sdist to be unnecessarily created a second time on an ext4 filesystem. This patch has been tested by John S. Gruber, who reported the bug. As this is a simple code revert, I think it's okay to commit without a unit test. files: Lib/distutils/dep_util.py | 7 ++++--- Misc/ACKS | 1 + Misc/NEWS | 2 ++ 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/Lib/distutils/dep_util.py b/Lib/distutils/dep_util.py --- a/Lib/distutils/dep_util.py +++ b/Lib/distutils/dep_util.py @@ -7,6 +7,7 @@ __revision__ = "$Id$" import os +from stat import ST_MTIME from distutils.errors import DistutilsFileError def newer(source, target): @@ -27,7 +28,7 @@ if not os.path.exists(target): return True - return os.stat(source).st_mtime > os.stat(target).st_mtime + return os.stat(source)[ST_MTIME] > os.stat(target)[ST_MTIME] def newer_pairwise(sources, targets): """Walk two filename lists in parallel, testing if each source is newer @@ -71,7 +72,7 @@ # is more recent than 'target', then 'target' is out-of-date and # we can immediately return true. If we fall through to the end # of the loop, then 'target' is up-to-date and we return false. - target_mtime = os.stat(target).st_mtime + target_mtime = os.stat(target)[ST_MTIME] for source in sources: if not os.path.exists(source): @@ -82,7 +83,7 @@ elif missing == 'newer': # missing source means target is return True # out-of-date - if os.stat(source).st_mtime > target_mtime: + if os.stat(source)[ST_MTIME] > target_mtime: return True return False diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -309,6 +309,7 @@ Eddy De Greef Duncan Grisby Fabian Groffen +John S.
Gruber Dag Gruneau Filip Gruszczyński Michael Guravage diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -37,6 +37,8 @@ Library ------- +- Issue #11933: Fix incorrect mtime comparison in distutils. + - Issues #11104, #8688: Fix the behavior of distutils' sdist command with manually-maintained MANIFEST files. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 2 20:02:43 2011 From: python-checkins at python.org (eric.araujo) Date: Tue, 02 Aug 2011 20:02:43 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAobWVyZ2UgMi43IC0+IDIuNyk6?= =?utf8?q?_Branch_merge?= Message-ID: http://hg.python.org/cpython/rev/4d39e6f22bef changeset: 71700:4d39e6f22bef branch: 2.7 parent: 71694:2aa8dd4df650 parent: 71699:aebe3243bb2c user: Éric Araujo date: Tue Aug 02 20:01:54 2011 +0200 summary: Branch merge files: Lib/distutils/dep_util.py | 7 ++++--- Misc/ACKS | 1 + Misc/NEWS | 2 ++ 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/Lib/distutils/dep_util.py b/Lib/distutils/dep_util.py --- a/Lib/distutils/dep_util.py +++ b/Lib/distutils/dep_util.py @@ -7,6 +7,7 @@ __revision__ = "$Id$" import os +from stat import ST_MTIME from distutils.errors import DistutilsFileError def newer(source, target): @@ -27,7 +28,7 @@ if not os.path.exists(target): return True - return os.stat(source).st_mtime > os.stat(target).st_mtime + return os.stat(source)[ST_MTIME] > os.stat(target)[ST_MTIME] def newer_pairwise(sources, targets): """Walk two filename lists in parallel, testing if each source is newer @@ -71,7 +72,7 @@ # is more recent than 'target', then 'target' is out-of-date and # we can immediately return true. If we fall through to the end # of the loop, then 'target' is up-to-date and we return false.
- target_mtime = os.stat(target).st_mtime + target_mtime = os.stat(target)[ST_MTIME] for source in sources: if not os.path.exists(source): @@ -82,7 +83,7 @@ elif missing == 'newer': # missing source means target is return True # out-of-date - if os.stat(source).st_mtime > target_mtime: + if os.stat(source)[ST_MTIME] > target_mtime: return True return False diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -309,6 +309,7 @@ Eddy De Greef Duncan Grisby Fabian Groffen +John S. Gruber Dag Gruneau Filip Gruszczyński Michael Guravage diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -37,6 +37,8 @@ Library ------- +- Issue #11933: Fix incorrect mtime comparison in distutils. + - Issues #11104, #8688: Fix the behavior of distutils' sdist command with manually-maintained MANIFEST files. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 2 20:06:28 2011 From: python-checkins at python.org (benjamin.peterson) Date: Tue, 02 Aug 2011 20:06:28 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_add_ThreadError_to_threadin?= =?utf8?b?Zy5fX2FsbF9fIChjbG9zZXMgIzEyNjc5KQ==?= Message-ID: http://hg.python.org/cpython/rev/bbeda42ea6a8 changeset: 71701:bbeda42ea6a8 parent: 71696:01192d30365a user: Benjamin Peterson date: Tue Aug 02 13:05:47 2011 -0500 summary: add ThreadError to threading.__all__ (closes #12679) files: Lib/threading.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/threading.py b/Lib/threading.py --- a/Lib/threading.py +++ b/Lib/threading.py @@ -20,7 +20,7 @@ __all__ = ['active_count', 'Condition', 'current_thread', 'enumerate', 'Event', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Thread', 'Barrier', - 'Timer', 'setprofile', 'settrace', 'local', 'stack_size'] + 'Timer', 'ThreadError', 'setprofile', 'settrace', 'local', 'stack_size'] # Rename some stuff so "from threading import *" is safe _start_new_thread = _thread.start_new_thread -- Repository URL:
http://hg.python.org/cpython From python-checkins at python.org Tue Aug 2 20:06:28 2011 From: python-checkins at python.org (benjamin.peterson) Date: Tue, 02 Aug 2011 20:06:28 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_merge_heads?= Message-ID: http://hg.python.org/cpython/rev/4061fedf5429 changeset: 71702:4061fedf5429 parent: 71701:bbeda42ea6a8 parent: 71698:b2dc821058fe user: Benjamin Peterson date: Tue Aug 02 13:06:19 2011 -0500 summary: merge heads files: Doc/library/stdtypes.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -2329,7 +2329,7 @@ >>> keys & {'eggs', 'bacon', 'salad'} {'bacon'} >>> keys ^ {'sausage', 'juice'} - {'juice', 'eggs', 'bacon', 'spam'} + {'juice', 'sausage', 'bacon', 'spam'} .. _typememoryview: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 00:30:41 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 00:30:41 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_NEWS_note_for_bbeda42ea6a8?= Message-ID: http://hg.python.org/cpython/rev/7b9dbee2e9f2 changeset: 71703:7b9dbee2e9f2 user: Benjamin Peterson date: Tue Aug 02 17:29:30 2011 -0500 summary: NEWS note for bbeda42ea6a8 files: Misc/NEWS | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,8 @@ Core and Builtins ----------------- +- Add ThreadError to threading.__all__. + - Make type(None), type(Ellipsis), and type(NotImplemented) callable. They return the respective singleton instances. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 00:30:42 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 00:30:42 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_expose_sched=2Eh_functions_?= =?utf8?b?KGNsb3NlcyAjMTI2NTUp?= Message-ID: http://hg.python.org/cpython/rev/89e92e684b37 changeset: 71704:89e92e684b37 user: Benjamin Peterson date: Tue Aug 02 17:30:04 2011 -0500 summary: expose sched.h functions (closes #12655) files: Doc/library/os.rst | 149 +++++++ Lib/test/test_posix.py | 133 ++++++- Misc/NEWS | 5 + Modules/posixmodule.c | 584 +++++++++++++++++++++++++++++ configure | 6 +- configure.in | 2 +- pyconfig.h.in | 3 + 7 files changed, 877 insertions(+), 5 deletions(-) diff --git a/Doc/library/os.rst b/Doc/library/os.rst --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -2744,6 +2744,155 @@ Availability: Unix. +Interface to the scheduler +-------------------------- + +These functions control how a process is allocated CPU time by the operating +system. They are only available on some Unix platforms. For more detailed +information, consult your Unix manpages. + +.. versionadded:: 3.3 + +The following scheduling policies are exposed if they are a supported by the +operating system. + +.. data:: SCHED_OTHER + + The default scheduling policy. + +.. data:: SCHED_BATCH + + Scheduling policy for CPU-intensive processes that tries to preserve + interactivity on the rest of the computer. + +.. data:: SCHED_IDLE + + Scheduling policy for extremely low priority background tasks. + +.. data:: SCHED_SPORADIC + + Scheduling policy for sporadic server programs. + +.. data:: SCHED_FIFO + + A First In First Out scheduling policy. + +.. data:: SCHED_RR + + A round-robin scheduling policy. + +.. data:: SCHED_RESET_ON_FORK + + This flag can OR'ed with any other scheduling policy. When a process with + this flag set forks, its child's scheduling policy and priority are reset to + the default. 
+ + +.. class:: sched_param(sched_priority) + + This class represents tunable scheduling parameters used in + :func:`sched_setparam`, :func:`sched_setscheduler`, and + :func:`sched_getparam`. It is immutable. + + At the moment, there is only one possible parameter: + + .. attribute:: sched_priority + + The scheduling priority for a scheduling policy. + + +.. function:: sched_get_priority_min(policy) + + Get the minimum priority value for *policy*. *policy* is one of the + scheduling policy constants above. + + +.. function:: sched_get_priority_max(policy) + + Get the maximum priority value for *policy*. *policy* is one of the + scheduling policy constants above. + + +.. function:: sched_setscheduler(pid, policy, param) + + Set the scheduling policy for the process with PID *pid*. A *pid* of 0 means + the calling process. *policy* is one of the scheduling policy constants + above. *param* is a :class:`sched_param` instance. + + +.. function:: sched_getscheduler(pid) + + Return the scheduling policy for the process with PID *pid*. A *pid* of 0 + means the calling process. The result is one of the scheduling policy + constants above. + + +.. function:: sched_setparam(pid, param) + + Set a scheduling parameters for the process with PID *pid*. A *pid* of 0 means + the calling process. *param* is a :class:`sched_param` instance. + + +.. function:: sched_getparam(pid) + + Return the scheduling parameters as a :class:`sched_param` instance for the + process with PID *pid*. A *pid* of 0 means the calling process. + + +.. function:: sched_rr_get_interval(pid) + + Return the round-robin quantum in seconds for the process with PID *pid*. A + *pid* of 0 means the calling process. + + +.. function:: sched_yield() + + Voluntarily relinquish the CPU. + + +.. class:: cpu_set(ncpus) + + :class:`cpu_set` represents a set of CPUs on which a process is eligible to + run. *ncpus* is the number of CPUs the set should describe. 
Methods on + :class:`cpu_set` allow CPUs to be add or removed. + + :class:`cpu_set` supports the AND, OR, and XOR bitwise operations. For + example, given two cpu_sets, ``one`` and ``two``, ``one | two`` returns a + :class:`cpu_set` containing the cpus enabled both in ``one`` and ``two``. + + .. method:: set(i) + + Enable CPU *i*. + + .. method:: clear(i) + + Remove CPU *i*. + + .. method:: isset(i) + + Return ``True`` if CPU *i* is enabled in the set. + + .. method:: count() + + Return the number of enabled CPUs in the set. + + .. method:: zero() + + Clear the set completely. + + +.. function:: sched_setaffinity(pid, mask) + + Restrict the process with PID *pid* to a set of CPUs. *mask* is a + :class:`cpu_set` instance. + + +.. function:: sched_getaffinity(pid, size) + + Return the :class:`cpu_set` the process with PID *pid* is restricted to. The + result will contain *size* CPUs. + + .. _os-path: Miscellaneous System Information diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -829,6 +829,138 @@ finally: posix.close(f) + requires_sched_h = unittest.skipUnless(hasattr(posix, 'sched_yield'), + "don't have scheduling support") + + @requires_sched_h + def test_sched_yield(self): + # This has no error conditions (at least on Linux). + posix.sched_yield() + + @requires_sched_h + def test_sched_priority(self): + # Round-robin usually has interesting priorities. 
+ pol = posix.SCHED_RR + lo = posix.sched_get_priority_min(pol) + hi = posix.sched_get_priority_max(pol) + self.assertIsInstance(lo, int) + self.assertIsInstance(hi, int) + self.assertGreaterEqual(hi, lo) + self.assertRaises(OSError, posix.sched_get_priority_min, -23) + self.assertRaises(OSError, posix.sched_get_priority_max, -23) + + @requires_sched_h + def test_get_and_set_scheduler_and_param(self): + possible_schedulers = [sched for name, sched in posix.__dict__.items() + if name.startswith("SCHED_")] + mine = posix.sched_getscheduler(0) + self.assertIn(mine, possible_schedulers) + try: + init = posix.sched_getscheduler(1) + except OSError as e: + if e.errno != errno.EPERM: + raise + else: + self.assertIn(init, possible_schedulers) + self.assertRaises(OSError, posix.sched_getscheduler, -1) + self.assertRaises(OSError, posix.sched_getparam, -1) + param = posix.sched_getparam(0) + self.assertIsInstance(param.sched_priority, int) + posix.sched_setscheduler(0, mine, param) + posix.sched_setparam(0, param) + self.assertRaises(OSError, posix.sched_setparam, -1, param) + self.assertRaises(OSError, posix.sched_setscheduler, -1, mine, param) + self.assertRaises(TypeError, posix.sched_setscheduler, 0, mine, None) + self.assertRaises(TypeError, posix.sched_setparam, 0, 43) + param = posix.sched_param(None) + self.assertRaises(TypeError, posix.sched_setparam, 0, param) + large = 214748364700 + param = posix.sched_param(large) + self.assertRaises(OverflowError, posix.sched_setparam, 0, param) + param = posix.sched_param(sched_priority=-large) + self.assertRaises(OverflowError, posix.sched_setparam, 0, param) + + @requires_sched_h + def test_sched_rr_get_interval(self): + interval = posix.sched_rr_get_interval(0) + self.assertIsInstance(interval, float) + # Reasonable constraints, I think. + self.assertGreaterEqual(interval, 0.) + self.assertLess(interval, 1.) 
+ + @requires_sched_h + def test_sched_affinity(self): + mask = posix.sched_getaffinity(0, 1024) + self.assertGreaterEqual(mask.count(), 1) + self.assertIsInstance(mask, posix.cpu_set) + self.assertRaises(OSError, posix.sched_getaffinity, -1, 1024) + empty = posix.cpu_set(10) + posix.sched_setaffinity(0, mask) + self.assertRaises(OSError, posix.sched_setaffinity, 0, empty) + self.assertRaises(OSError, posix.sched_setaffinity, -1, mask) + + @requires_sched_h + def test_cpu_set_basic(self): + s = posix.cpu_set(10) + self.assertEqual(len(s), 10) + self.assertEqual(s.count(), 0) + s.set(0) + s.set(9) + self.assertTrue(s.isset(0)) + self.assertTrue(s.isset(9)) + self.assertFalse(s.isset(5)) + self.assertEqual(s.count(), 2) + s.clear(0) + self.assertFalse(s.isset(0)) + self.assertEqual(s.count(), 1) + s.zero() + self.assertFalse(s.isset(0)) + self.assertFalse(s.isset(9)) + self.assertEqual(s.count(), 0) + self.assertRaises(ValueError, s.set, -1) + self.assertRaises(ValueError, s.set, 10) + self.assertRaises(ValueError, s.clear, -1) + self.assertRaises(ValueError, s.clear, 10) + self.assertRaises(ValueError, s.isset, -1) + self.assertRaises(ValueError, s.isset, 10) + + @requires_sched_h + def test_cpu_set_cmp(self): + self.assertNotEqual(posix.cpu_set(11), posix.cpu_set(12)) + l = posix.cpu_set(10) + r = posix.cpu_set(10) + self.assertEqual(l, r) + l.set(1) + self.assertNotEqual(l, r) + r.set(1) + self.assertEqual(l, r) + + @requires_sched_h + def test_cpu_set_bitwise(self): + l = posix.cpu_set(5) + l.set(0) + l.set(1) + r = posix.cpu_set(5) + r.set(1) + r.set(2) + b = l & r + self.assertEqual(b.count(), 1) + self.assertTrue(b.isset(1)) + b = l | r + self.assertEqual(b.count(), 3) + self.assertTrue(b.isset(0)) + self.assertTrue(b.isset(1)) + self.assertTrue(b.isset(2)) + b = l ^ r + self.assertEqual(b.count(), 2) + self.assertTrue(b.isset(0)) + self.assertFalse(b.isset(1)) + self.assertTrue(b.isset(2)) + b = l + b |= r + self.assertIs(b, l) + self.assertEqual(l.count(), 
3) + class PosixGroupsTester(unittest.TestCase): def setUp(self): @@ -864,7 +996,6 @@ posix.setgroups(groups) self.assertListEqual(groups, posix.getgroups()) - def test_main(): try: support.run_unittest(PosixTester, PosixGroupsTester) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -251,6 +251,11 @@ Library ------- +- Issue #12655: Expose functions from sched.h in the os module: sched_yield(), + sched_setscheduler(), sched_getscheduler(), sched_setparam(), + sched_get_min_priority(), sched_get_max_priority(), sched_rr_get_interval(), + sched_getaffinity(), sched_setaffinity(). + - Issues #11104, #8688: Fix the behavior of distutils' sdist command with manually-maintained MANIFEST files. diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -105,6 +105,10 @@ #include #endif +#ifdef HAVE_SCHED_H +#include +#endif + #if defined(__FreeBSD__) || defined(__DragonFly__) || defined(__APPLE__) #ifdef HAVE_SYS_SOCKET_H #include @@ -1605,6 +1609,7 @@ static int initialized; static PyTypeObject StatResultType; static PyTypeObject StatVFSResultType; +static PyTypeObject SchedParamType; static newfunc structseq_new; static PyObject * @@ -4544,6 +4549,542 @@ } #endif +#ifdef HAVE_SCHED_H + +PyDoc_STRVAR(posix_sched_get_priority_max__doc__, +"sched_get_priority_max(policy)\n\n\ +Get the maximum scheduling priority for *policy*."); + +static PyObject * +posix_sched_get_priority_max(PyObject *self, PyObject *args) +{ + int policy, max; + + if (!PyArg_ParseTuple(args, "i:sched_get_priority_max", &policy)) + return NULL; + max = sched_get_priority_max(policy); + if (max < 0) + return posix_error(); + return PyLong_FromLong(max); +} + +PyDoc_STRVAR(posix_sched_get_priority_min__doc__, +"sched_get_priority_min(policy)\n\n\ +Get the minimum scheduling priority for *policy*."); + +static PyObject * +posix_sched_get_priority_min(PyObject *self, PyObject *args) +{ + int policy, min; + + if 
(!PyArg_ParseTuple(args, "i:sched_get_priority_min", &policy)) + return NULL; + min = sched_get_priority_min(policy); + if (min < 0) + return posix_error(); + return PyLong_FromLong(min); +} + +PyDoc_STRVAR(posix_sched_getscheduler__doc__, +"sched_getscheduler(pid)\n\n\ +Get the scheduling policy for the process with a PID of *pid*.\n\ +Passing a PID of 0 returns the scheduling policy for the calling process."); + +static PyObject * +posix_sched_getscheduler(PyObject *self, PyObject *args) +{ + pid_t pid; + int policy; + + if (!PyArg_ParseTuple(args, _Py_PARSE_PID ":sched_getscheduler", &pid)) + return NULL; + policy = sched_getscheduler(pid); + if (policy < 0) + return posix_error(); + return PyLong_FromLong(policy); +} + +static PyObject * +sched_param_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) +{ + PyObject *res, *priority; + static char *kwlist[] = {"sched_priority"}; + + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O:sched_param", kwlist, &priority)) + return NULL; + res = PyStructSequence_New(type); + if (!res) + return NULL; + Py_INCREF(priority); + PyStructSequence_SET_ITEM(res, 0, priority); + return res; +} + +PyDoc_STRVAR(sched_param__doc__, +"sched_param(sched_priority): A scheduling parameter.\n\n\ +Current has only one field: sched_priority"); + +static PyStructSequence_Field sched_param_fields[] = { + {"sched_priority", "the scheduling priority"}, + {0} +}; + +static PyStructSequence_Desc sched_param_desc = { + "sched_param", /* name */ + sched_param__doc__, /* doc */ + sched_param_fields, + 1 +}; + +static int +convert_sched_param(PyObject *param, struct sched_param *res) +{ + long priority; + + if (Py_TYPE(param) != &SchedParamType) { + PyErr_SetString(PyExc_TypeError, "must have a sched_param object"); + return 0; + } + priority = PyLong_AsLong(PyStructSequence_GET_ITEM(param, 0)); + if (priority == -1 && PyErr_Occurred()) + return 0; + if (priority > INT_MAX || priority < INT_MIN) { + PyErr_SetString(PyExc_OverflowError, 
"sched_priority out of range"); + return 0; + } + res->sched_priority = Py_SAFE_DOWNCAST(priority, long, int); + return 1; +} + +PyDoc_STRVAR(posix_sched_setscheduler__doc__, +"sched_setscheduler(pid, policy, param)\n\n\ +Set the scheduling policy, *policy*, for *pid*.\n\ +If *pid* is 0, the calling process is changed.\n\ +*param* is an instance of sched_param."); + +static PyObject * +posix_sched_setscheduler(PyObject *self, PyObject *args) +{ + pid_t pid; + int policy; + struct sched_param param; + + if (!PyArg_ParseTuple(args, _Py_PARSE_PID "iO&:sched_setscheduler", + &pid, &policy, &convert_sched_param, ¶m)) + return NULL; + if (sched_setscheduler(pid, policy, ¶m)) + return posix_error(); + Py_RETURN_NONE; +} + +PyDoc_STRVAR(posix_sched_getparam__doc__, +"sched_getparam(pid) -> sched_param\n\n\ +Returns scheduling parameters for the process with *pid* as an instance of the\n\ +sched_param class. A PID of 0 means the calling process."); + +static PyObject * +posix_sched_getparam(PyObject *self, PyObject *args) +{ + pid_t pid; + struct sched_param param; + PyObject *res, *priority; + + if (!PyArg_ParseTuple(args, _Py_PARSE_PID ":sched_getparam", &pid)) + return NULL; + if (sched_getparam(pid, ¶m)) + return posix_error(); + res = PyStructSequence_New(&SchedParamType); + if (!res) + return NULL; + priority = PyLong_FromLong(param.sched_priority); + if (!priority) { + Py_DECREF(res); + return NULL; + } + PyStructSequence_SET_ITEM(res, 0, priority); + return res; +} + +PyDoc_STRVAR(posix_sched_setparam__doc__, +"sched_setparam(pid, param)\n\n\ +Set scheduling parameters for a process with PID *pid*.\n\ +A PID of 0 means the calling process."); + +static PyObject * +posix_sched_setparam(PyObject *self, PyObject *args) +{ + pid_t pid; + struct sched_param param; + + if (!PyArg_ParseTuple(args, _Py_PARSE_PID "O&:sched_setparam", + &pid, &convert_sched_param, ¶m)) + return NULL; + if (sched_setparam(pid, ¶m)) + return posix_error(); + Py_RETURN_NONE; +} + 
+PyDoc_STRVAR(posix_sched_rr_get_interval__doc__, +"sched_rr_get_interval(pid) -> float\n\n\ +Return the round-robin quantum for the process with PID *pid* in seconds."); + +static PyObject * +posix_sched_rr_get_interval(PyObject *self, PyObject *args) +{ + pid_t pid; + struct timespec interval; + + if (!PyArg_ParseTuple(args, _Py_PARSE_PID ":sched_rr_get_interval", &pid)) + return NULL; + if (sched_rr_get_interval(pid, &interval)) + return posix_error(); + return PyFloat_FromDouble((double)interval.tv_sec + 1e-9*interval.tv_nsec); +} + +PyDoc_STRVAR(posix_sched_yield__doc__, +"sched_yield()\n\n\ +Voluntarily relinquish the CPU."); + +static PyObject * +posix_sched_yield(PyObject *self, PyObject *noargs) +{ + if (sched_yield()) + return posix_error(); + Py_RETURN_NONE; +} + +typedef struct { + PyObject_HEAD; + Py_ssize_t size; + int ncpus; + cpu_set_t *set; +} Py_cpu_set; + +static PyTypeObject cpu_set_type; + +static void +cpu_set_dealloc(Py_cpu_set *set) +{ + assert(set->set); + CPU_FREE(set->set); + Py_TYPE(set)->tp_free(set); +} + +static Py_cpu_set * +make_new_cpu_set(PyTypeObject *type, Py_ssize_t size) +{ + Py_cpu_set *set; + + if (size < 0) { + PyErr_SetString(PyExc_ValueError, "negative size"); + return NULL; + } + set = (Py_cpu_set *)type->tp_alloc(type, 0); + if (!set) + return NULL; + set->ncpus = size; + set->size = CPU_ALLOC_SIZE(size); + set->set = CPU_ALLOC(size); + if (!set->set) { + type->tp_free(set); + PyErr_NoMemory(); + return NULL; + } + CPU_ZERO_S(set->size, set->set); + return set; +} + +static PyObject * +cpu_set_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) +{ + int size; + + if (!_PyArg_NoKeywords("cpu_set()", kwargs) || + !PyArg_ParseTuple(args, "i:cpu_set", &size)) + return NULL; + return (PyObject *)make_new_cpu_set(type, size); +} + +static PyObject * +cpu_set_repr(Py_cpu_set *set) +{ + return PyUnicode_FromFormat("", set->ncpus); +} + +static Py_ssize_t +cpu_set_len(Py_cpu_set *set) +{ + return set->ncpus; +} + +static 
int +_get_cpu(Py_cpu_set *set, const char *requester, PyObject *args) +{ + int cpu; + if (!PyArg_ParseTuple(args, requester, &cpu)) + return -1; + if (cpu < 0) { + PyErr_SetString(PyExc_ValueError, "cpu < 0 not valid"); + return -1; + } + if (cpu >= set->ncpus) { + PyErr_SetString(PyExc_ValueError, "cpu too large for set"); + return -1; + } + return cpu; +} + +PyDoc_STRVAR(cpu_set_set_doc, +"cpu_set.set(i)\n\n\ +Add CPU *i* to the set."); + +static PyObject * +cpu_set_set(Py_cpu_set *set, PyObject *args) +{ + int cpu = _get_cpu(set, "i|set", args); + if (cpu == -1) + return NULL; + CPU_SET_S(cpu, set->size, set->set); + Py_RETURN_NONE; +} + +PyDoc_STRVAR(cpu_set_count_doc, +"cpu_set.count() -> int\n\n\ +Return the number of CPUs active in the set."); + +static PyObject * +cpu_set_count(Py_cpu_set *set, PyObject *noargs) +{ + return PyLong_FromLong(CPU_COUNT_S(set->size, set->set)); +} + +PyDoc_STRVAR(cpu_set_clear_doc, +"cpu_set.clear(i)\n\n\ +Remove CPU *i* from the set."); + +static PyObject * +cpu_set_clear(Py_cpu_set *set, PyObject *args) +{ + int cpu = _get_cpu(set, "i|clear", args); + if (cpu == -1) + return NULL; + CPU_CLR_S(cpu, set->size, set->set); + Py_RETURN_NONE; +} + +PyDoc_STRVAR(cpu_set_isset_doc, +"cpu_set.isset(i) -> bool\n\n\ +Test if CPU *i* is in the set."); + +static PyObject * +cpu_set_isset(Py_cpu_set *set, PyObject *args) +{ + int cpu = _get_cpu(set, "i|isset", args); + if (cpu == -1) + return NULL; + if (CPU_ISSET_S(cpu, set->size, set->set)) + Py_RETURN_TRUE; + Py_RETURN_FALSE; +} + +PyDoc_STRVAR(cpu_set_zero_doc, +"cpu_set.zero()\n\n\ +Clear the cpu_set."); + +static PyObject * +cpu_set_zero(Py_cpu_set *set, PyObject *noargs) +{ + CPU_ZERO_S(set->size, set->set); + Py_RETURN_NONE; +} + +static PyObject * +cpu_set_richcompare(Py_cpu_set *set, Py_cpu_set *other, int op) +{ + int eq; + + if ((op != Py_EQ && op != Py_NE) || Py_TYPE(other) != &cpu_set_type) { + Py_INCREF(Py_NotImplemented); + return Py_NotImplemented; + } + eq = set->ncpus == 
other->ncpus && CPU_EQUAL_S(set->size, set->set, other->set); + if ((op == Py_EQ) ? eq : !eq) + Py_RETURN_TRUE; + else + Py_RETURN_FALSE; +} + +#define CPU_SET_BINOP(name, op) \ + static PyObject * \ + do_cpu_set_##name(Py_cpu_set *left, Py_cpu_set *right, Py_cpu_set *res) { \ + if (res) { \ + Py_INCREF(res); \ + } \ + else { \ + res = make_new_cpu_set(&cpu_set_type, left->size); \ + if (!res) \ + return NULL; \ + } \ + if (Py_TYPE(right) != &cpu_set_type || left->size != right->size) { \ + Py_DECREF(res); \ + Py_INCREF(Py_NotImplemented); \ + return Py_NotImplemented; \ + } \ + assert(left->size == right->size == res->size); \ + op(res->size, res->set, left->set, right->set); \ + return (PyObject *)res; \ + } \ + static PyObject * \ + cpu_set_##name(Py_cpu_set *left, Py_cpu_set *right) { \ + return do_cpu_set_##name(left, right, NULL); \ + } \ + static PyObject * \ + cpu_set_i##name(Py_cpu_set *left, Py_cpu_set *right) { \ + return do_cpu_set_##name(left, right, left); \ + } \ + +CPU_SET_BINOP(and, CPU_AND_S) +CPU_SET_BINOP(or, CPU_OR_S) +CPU_SET_BINOP(xor, CPU_XOR_S) +#undef CPU_SET_BINOP + +PyDoc_STRVAR(cpu_set_doc, +"cpu_set(size)\n\n\ +Create an empty mask of CPUs."); + +static PyNumberMethods cpu_set_as_number = { + 0, /*nb_add*/ + 0, /*nb_subtract*/ + 0, /*nb_multiply*/ + 0, /*nb_remainder*/ + 0, /*nb_divmod*/ + 0, /*nb_power*/ + 0, /*nb_negative*/ + 0, /*nb_positive*/ + 0, /*nb_absolute*/ + 0, /*nb_bool*/ + 0, /*nb_invert*/ + 0, /*nb_lshift*/ + 0, /*nb_rshift*/ + (binaryfunc)cpu_set_and, /*nb_and*/ + (binaryfunc)cpu_set_xor, /*nb_xor*/ + (binaryfunc)cpu_set_or, /*nb_or*/ + 0, /*nb_int*/ + 0, /*nb_reserved*/ + 0, /*nb_float*/ + 0, /*nb_inplace_add*/ + 0, /*nb_inplace_subtract*/ + 0, /*nb_inplace_multiply*/ + 0, /*nb_inplace_remainder*/ + 0, /*nb_inplace_power*/ + 0, /*nb_inplace_lshift*/ + 0, /*nb_inplace_rshift*/ + (binaryfunc)cpu_set_iand, /*nb_inplace_and*/ + (binaryfunc)cpu_set_ixor, /*nb_inplace_xor*/ + (binaryfunc)cpu_set_ior, /*nb_inplace_or*/ +}; + 
+static PySequenceMethods cpu_set_as_sequence = { + (lenfunc)cpu_set_len, /* sq_length */ +}; + +static PyMethodDef cpu_set_methods[] = { + {"clear", (PyCFunction)cpu_set_clear, METH_VARARGS, cpu_set_clear_doc}, + {"count", (PyCFunction)cpu_set_count, METH_NOARGS, cpu_set_count_doc}, + {"isset", (PyCFunction)cpu_set_isset, METH_VARARGS, cpu_set_isset_doc}, + {"set", (PyCFunction)cpu_set_set, METH_VARARGS, cpu_set_set_doc}, + {"zero", (PyCFunction)cpu_set_zero, METH_NOARGS, cpu_set_zero_doc}, + {NULL, NULL} /* sentinel */ +}; + +static PyTypeObject cpu_set_type = { + PyVarObject_HEAD_INIT(&PyType_Type, 0) + "posix.cpu_set", /* tp_name */ + sizeof(Py_cpu_set), /* tp_basicsize */ + 0, /* tp_itemsize */ + /* methods */ + (destructor)cpu_set_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_reserved */ + (reprfunc)cpu_set_repr, /* tp_repr */ + &cpu_set_as_number, /* tp_as_number */ + &cpu_set_as_sequence, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + PyObject_HashNotImplemented, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + PyObject_GenericGetAttr, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT, /* tp_flags */ + cpu_set_doc, /* tp_doc */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + (richcmpfunc)cpu_set_richcompare, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + cpu_set_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + PyType_GenericAlloc, /* tp_alloc */ + cpu_set_new, /* tp_new */ + PyObject_Del, /* tp_free */ +}; + +PyDoc_STRVAR(posix_sched_setaffinity__doc__, +"sched_setaffinity(pid, cpu_set)\n\n\ +Set the affinity of the process with PID *pid* to *cpu_set*."); + +static PyObject * +posix_sched_setaffinity(PyObject *self, PyObject *args) +{ + pid_t pid; + Py_cpu_set *cpu_set; 
+ + if (!PyArg_ParseTuple(args, _Py_PARSE_PID "O!|sched_setaffinity", + &pid, &cpu_set_type, &cpu_set)) + return NULL; + if (sched_setaffinity(pid, cpu_set->size, cpu_set->set)) + return posix_error(); + Py_RETURN_NONE; +} + +PyDoc_STRVAR(posix_sched_getaffinity__doc__, +"sched_getaffinity(pid, ncpus) -> cpu_set\n\n\ +Return the affinity of the process with PID *pid*.\n\ +The returned cpu_set will be of size *ncpus*."); + +static PyObject * +posix_sched_getaffinity(PyObject *self, PyObject *args) +{ + pid_t pid; + int ncpus; + Py_cpu_set *res; + + if (!PyArg_ParseTuple(args, _Py_PARSE_PID "i|sched_getaffinity", + &pid, &ncpus)) + return NULL; + res = make_new_cpu_set(&cpu_set_type, ncpus); + if (!res) + return NULL; + if (sched_getaffinity(pid, res->size, res->set)) { + Py_DECREF(res); + return posix_error(); + } + return (PyObject *)res; +} + +#endif /* HAVE_SCHED_H */ + /* AIX uses /dev/ptc but is otherwise the same as /dev/ptmx */ /* IRIX has both /dev/ptc and /dev/ptmx, use ptmx */ #if defined(HAVE_DEV_PTC) && !defined(HAVE_DEV_PTMX) @@ -9506,6 +10047,18 @@ #ifdef HAVE_FORK {"fork", posix_fork, METH_NOARGS, posix_fork__doc__}, #endif /* HAVE_FORK */ +#ifdef HAVE_SCHED_H + {"sched_get_priority_max", posix_sched_get_priority_max, METH_VARARGS, posix_sched_get_priority_max__doc__}, + {"sched_get_priority_min", posix_sched_get_priority_min, METH_VARARGS, posix_sched_get_priority_min__doc__}, + {"sched_getparam", posix_sched_getparam, METH_VARARGS, posix_sched_getparam__doc__}, + {"sched_getscheduler", posix_sched_getscheduler, METH_VARARGS, posix_sched_getscheduler__doc__}, + {"sched_rr_get_interval", posix_sched_rr_get_interval, METH_VARARGS, posix_sched_rr_get_interval__doc__}, + {"sched_setparam", posix_sched_setparam, METH_VARARGS, posix_sched_setparam__doc__}, + {"sched_setscheduler", posix_sched_setscheduler, METH_VARARGS, posix_sched_setscheduler__doc__}, + {"sched_yield", posix_sched_yield, METH_NOARGS, posix_sched_yield__doc__}, + {"sched_setaffinity", 
posix_sched_setaffinity, METH_VARARGS, posix_sched_setaffinity__doc__}, + {"sched_getaffinity", posix_sched_getaffinity, METH_VARARGS, posix_sched_getaffinity__doc__}, +#endif #if defined(HAVE_OPENPTY) || defined(HAVE__GETPTY) || defined(HAVE_DEV_PTMX) {"openpty", posix_openpty, METH_NOARGS, posix_openpty__doc__}, #endif /* HAVE_OPENPTY || HAVE__GETPTY || HAVE_DEV_PTMX */ @@ -10243,6 +10796,24 @@ #endif #endif +#ifdef HAVE_SCHED_H + if (ins(d, "SCHED_FIFO", (long)SCHED_FIFO)) return -1; + if (ins(d, "SCHED_RR", (long)SCHED_RR)) return -1; +#ifdef SCHED_SPORADIC + if (ins(d, "SCHED_SPORADIC", (long)SCHED_SPORADIC) return -1; +#endif + if (ins(d, "SCHED_OTHER", (long)SCHED_OTHER)) return -1; +#ifdef SCHED_BATCH + if (ins(d, "SCHED_BATCH", (long)SCHED_BATCH)) return -1; +#endif +#ifdef SCHED_IDLE + if (ins(d, "SCHED_IDLE", (long)SCHED_IDLE)) return -1; +#endif +#ifdef SCHED_RESET_ON_FORK + if (ins(d, "SCHED_RESET_ON_FORK", (long)SCHED_RESET_ON_FORK)) return -1; +#endif +#endif + #if defined(PYOS_OS2) if (insertvalues(d)) return -1; #endif @@ -10305,6 +10876,11 @@ Py_INCREF(PyExc_OSError); PyModule_AddObject(m, "error", PyExc_OSError); + if (PyType_Ready(&cpu_set_type) < 0) + return NULL; + Py_INCREF(&cpu_set_type); + PyModule_AddObject(m, "cpu_set", (PyObject *)&cpu_set_type); + #ifdef HAVE_PUTENV if (posix_putenv_garbage == NULL) posix_putenv_garbage = PyDict_New(); @@ -10335,6 +10911,12 @@ ticks_per_second = 60; /* magic fallback value; may be bogus */ # endif #endif + +#ifdef HAVE_SCHED_H + sched_param_desc.name = MODNAME ".sched_param"; + PyStructSequence_InitType(&SchedParamType, &sched_param_desc); + SchedParamType.tp_new = sched_param_new; +#endif } #if defined(HAVE_WAITID) && !defined(__APPLE__) Py_INCREF((PyObject*) &WaitidResultType); @@ -10345,6 +10927,8 @@ Py_INCREF((PyObject*) &StatVFSResultType); PyModule_AddObject(m, "statvfs_result", (PyObject*) &StatVFSResultType); + Py_INCREF(&SchedParamType); + PyModule_AddObject(m, "sched_param", (PyObject 
*)&SchedParamType); initialized = 1; #ifdef __APPLE__ diff --git a/configure b/configure --- a/configure +++ b/configure @@ -6092,7 +6092,7 @@ for ac_header in asm/types.h conio.h curses.h direct.h dlfcn.h errno.h \ fcntl.h grp.h \ ieeefp.h io.h langinfo.h libintl.h ncurses.h poll.h process.h pthread.h \ -shadow.h signal.h stdint.h stropts.h termios.h \ +sched.h shadow.h signal.h stdint.h stropts.h termios.h \ unistd.h utime.h \ sys/audioio.h sys/bsdtty.h sys/epoll.h sys/event.h sys/file.h sys/loadavg.h \ sys/lock.h sys/mkdev.h sys/modem.h \ @@ -14418,8 +14418,8 @@ cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 # Files that config.status was made for. -config_files="`echo $ac_config_files`" -config_headers="`echo $ac_config_headers`" +config_files="$ac_config_files" +config_headers="$ac_config_headers" _ACEOF diff --git a/configure.in b/configure.in --- a/configure.in +++ b/configure.in @@ -1301,7 +1301,7 @@ AC_CHECK_HEADERS(asm/types.h conio.h curses.h direct.h dlfcn.h errno.h \ fcntl.h grp.h \ ieeefp.h io.h langinfo.h libintl.h ncurses.h poll.h process.h pthread.h \ -shadow.h signal.h stdint.h stropts.h termios.h \ +sched.h shadow.h signal.h stdint.h stropts.h termios.h \ unistd.h utime.h \ sys/audioio.h sys/bsdtty.h sys/epoll.h sys/event.h sys/file.h sys/loadavg.h \ sys/lock.h sys/mkdev.h sys/modem.h \ diff --git a/pyconfig.h.in b/pyconfig.h.in --- a/pyconfig.h.in +++ b/pyconfig.h.in @@ -650,6 +650,9 @@ /* Define to 1 if you have the `round' function. */ #undef HAVE_ROUND +/* Define to 1 if you have the header file. */ +#undef HAVE_SCHED_H + /* Define to 1 if you have the `select' function. 
*/ #undef HAVE_SELECT -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 00:41:43 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 00:41:43 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_sched=2Eh_can_exist_without?= =?utf8?q?_sched_affinity_support?= Message-ID: http://hg.python.org/cpython/rev/4b8e407e9a32 changeset: 71705:4b8e407e9a32 user: Benjamin Peterson date: Tue Aug 02 17:41:34 2011 -0500 summary: sched.h can exist without sched affinity support files: Lib/test/test_posix.py | 10 ++++++---- Modules/posixmodule.c | 8 ++++++++ configure | 1 + configure.in | 1 + pyconfig.h.in | 3 +++ 5 files changed, 19 insertions(+), 4 deletions(-) diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -831,6 +831,8 @@ requires_sched_h = unittest.skipUnless(hasattr(posix, 'sched_yield'), "don't have scheduling support") + requires_sched_affinity = unittest.skipUnless(hasattr(posix, 'cpu_set'), + "dont' have sched affinity support") @requires_sched_h def test_sched_yield(self): @@ -888,7 +890,7 @@ self.assertGreaterEqual(interval, 0.) self.assertLess(interval, 1.) 
- @requires_sched_h + @requires_sched_affinity def test_sched_affinity(self): mask = posix.sched_getaffinity(0, 1024) self.assertGreaterEqual(mask.count(), 1) @@ -899,7 +901,7 @@ self.assertRaises(OSError, posix.sched_setaffinity, 0, empty) self.assertRaises(OSError, posix.sched_setaffinity, -1, mask) - @requires_sched_h + @requires_sched_affinity def test_cpu_set_basic(self): s = posix.cpu_set(10) self.assertEqual(len(s), 10) @@ -924,7 +926,7 @@ self.assertRaises(ValueError, s.isset, -1) self.assertRaises(ValueError, s.isset, 10) - @requires_sched_h + @requires_sched_affinity def test_cpu_set_cmp(self): self.assertNotEqual(posix.cpu_set(11), posix.cpu_set(12)) l = posix.cpu_set(10) @@ -935,7 +937,7 @@ r.set(1) self.assertEqual(l, r) - @requires_sched_h + @requires_sched_affinity def test_cpu_set_bitwise(self): l = posix.cpu_set(5) l.set(0) diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -4753,6 +4753,8 @@ Py_RETURN_NONE; } +#ifdef HAVE_SCHED_SETAFFINITY + typedef struct { PyObject_HEAD; Py_ssize_t size; @@ -5083,6 +5085,8 @@ return (PyObject *)res; } +#endif /* HAVE_SCHED_SETAFFINITY */ + #endif /* HAVE_SCHED_H */ /* AIX uses /dev/ptc but is otherwise the same as /dev/ptmx */ @@ -10056,9 +10060,11 @@ {"sched_setparam", posix_sched_setparam, METH_VARARGS, posix_sched_setparam__doc__}, {"sched_setscheduler", posix_sched_setscheduler, METH_VARARGS, posix_sched_setscheduler__doc__}, {"sched_yield", posix_sched_yield, METH_NOARGS, posix_sched_yield__doc__}, +#ifdef HAVE_SCHED_SETAFFINITY {"sched_setaffinity", posix_sched_setaffinity, METH_VARARGS, posix_sched_setaffinity__doc__}, {"sched_getaffinity", posix_sched_getaffinity, METH_VARARGS, posix_sched_getaffinity__doc__}, #endif +#endif #if defined(HAVE_OPENPTY) || defined(HAVE__GETPTY) || defined(HAVE_DEV_PTMX) {"openpty", posix_openpty, METH_NOARGS, posix_openpty__doc__}, #endif /* HAVE_OPENPTY || HAVE__GETPTY || HAVE_DEV_PTMX */ @@ -10876,10 
+10882,12 @@ Py_INCREF(PyExc_OSError); PyModule_AddObject(m, "error", PyExc_OSError); +#ifdef HAVE_SCHED_SETAFFINITY if (PyType_Ready(&cpu_set_type) < 0) return NULL; Py_INCREF(&cpu_set_type); PyModule_AddObject(m, "cpu_set", (PyObject *)&cpu_set_type); +#endif #ifdef HAVE_PUTENV if (posix_putenv_garbage == NULL) diff --git a/configure b/configure --- a/configure +++ b/configure @@ -9339,6 +9339,7 @@ select sem_open sem_timedwait sem_getvalue sem_unlink sendfile setegid seteuid \ setgid sethostname \ setlocale setregid setreuid setresuid setresgid setsid setpgid setpgrp setpriority setuid setvbuf \ + sched_setaffinity \ sigaction sigaltstack siginterrupt sigpending sigrelse \ sigtimedwait sigwait sigwaitinfo snprintf strftime strlcpy symlinkat sync \ sysconf tcgetpgrp tcsetpgrp tempnam timegm times tmpfile tmpnam tmpnam_r \ diff --git a/configure.in b/configure.in --- a/configure.in +++ b/configure.in @@ -2537,6 +2537,7 @@ select sem_open sem_timedwait sem_getvalue sem_unlink sendfile setegid seteuid \ setgid sethostname \ setlocale setregid setreuid setresuid setresgid setsid setpgid setpgrp setpriority setuid setvbuf \ + sched_setaffinity \ sigaction sigaltstack siginterrupt sigpending sigrelse \ sigtimedwait sigwait sigwaitinfo snprintf strftime strlcpy symlinkat sync \ sysconf tcgetpgrp tcsetpgrp tempnam timegm times tmpfile tmpnam tmpnam_r \ diff --git a/pyconfig.h.in b/pyconfig.h.in --- a/pyconfig.h.in +++ b/pyconfig.h.in @@ -653,6 +653,9 @@ /* Define to 1 if you have the header file. */ #undef HAVE_SCHED_H +/* Define to 1 if you have the `sched_setaffinity' function. */ +#undef HAVE_SCHED_SETAFFINITY + /* Define to 1 if you have the `select' function. 
*/ #undef HAVE_SELECT -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 00:44:36 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 00:44:36 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_make=5Fnew=5Fcpu=5Fset_take?= =?utf8?q?s_the_number_of_cpus?= Message-ID: http://hg.python.org/cpython/rev/515db2c10819 changeset: 71706:515db2c10819 user: Benjamin Peterson date: Tue Aug 02 17:44:26 2011 -0500 summary: make_new_cpu_set takes the number of cpus files: Modules/posixmodule.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -4923,7 +4923,7 @@ Py_INCREF(res); \ } \ else { \ - res = make_new_cpu_set(&cpu_set_type, left->size); \ + res = make_new_cpu_set(&cpu_set_type, left->ncpus); \ if (!res) \ return NULL; \ } \ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 00:45:19 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 00:45:19 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_move_news_note_to_correct_s?= =?utf8?q?ection?= Message-ID: http://hg.python.org/cpython/rev/c1ee6de35c20 changeset: 71707:c1ee6de35c20 user: Benjamin Peterson date: Tue Aug 02 17:45:00 2011 -0500 summary: move news note to correct section files: Misc/NEWS | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,8 +10,6 @@ Core and Builtins ----------------- -- Add ThreadError to threading.__all__. - - Make type(None), type(Ellipsis), and type(NotImplemented) callable. They return the respective singleton instances. @@ -256,6 +254,8 @@ sched_get_min_priority(), sched_get_max_priority(), sched_rr_get_interval(), sched_getaffinity(), sched_setaffinity(). +- Add ThreadError to threading.__all__. 
+ - Issues #11104, #8688: Fix the behavior of distutils' sdist command with manually-maintained MANIFEST files. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 01:07:46 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 01:07:46 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_check_individually_for_some?= =?utf8?q?_for_sched=5F_functions?= Message-ID: http://hg.python.org/cpython/rev/21730a883f20 changeset: 71708:21730a883f20 user: Benjamin Peterson date: Tue Aug 02 18:07:32 2011 -0500 summary: check individually for some for sched_ functions files: Lib/test/test_posix.py | 4 ++-- Modules/posixmodule.c | 30 ++++++++++++++++++++++++++++++ configure | 2 +- configure.in | 2 +- pyconfig.h.in | 9 +++++++++ 5 files changed, 43 insertions(+), 4 deletions(-) diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -851,7 +851,7 @@ self.assertRaises(OSError, posix.sched_get_priority_min, -23) self.assertRaises(OSError, posix.sched_get_priority_max, -23) - @requires_sched_h + @unittest.skipUnless(hasattr(posix, 'sched_setscheduler'), "can't change scheduler") def test_get_and_set_scheduler_and_param(self): possible_schedulers = [sched for name, sched in posix.__dict__.items() if name.startswith("SCHED_")] @@ -882,7 +882,7 @@ param = posix.sched_param(sched_priority=-large) self.assertRaises(OverflowError, posix.sched_setparam, 0, param) - @requires_sched_h + @unittest.skipUnless(hasattr(posix, "sched_rr_get_interval"), "no function") def test_sched_rr_get_interval(self): interval = posix.sched_rr_get_interval(0) self.assertIsInstance(interval, float) diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -4585,6 +4585,8 @@ return PyLong_FromLong(min); } +#ifdef HAVE_SCHED_SETSCHEDULER + PyDoc_STRVAR(posix_sched_getscheduler__doc__, "sched_getscheduler(pid)\n\n\ Get 
the scheduling policy for the process with a PID of *pid*.\n\ @@ -4604,6 +4606,10 @@ return PyLong_FromLong(policy); } +#endif + +#if defined(HAVE_SCHED_SETSCHEDULER) || defined(HAVE_SCHED_SETPARAM) + static PyObject * sched_param_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) { @@ -4656,6 +4662,10 @@ return 1; } +#endif + +#ifdef HAVE_SCHED_SETSCHEDULER + PyDoc_STRVAR(posix_sched_setscheduler__doc__, "sched_setscheduler(pid, policy, param)\n\n\ Set the scheduling policy, *policy*, for *pid*.\n\ @@ -4677,6 +4687,10 @@ Py_RETURN_NONE; } +#endif + +#ifdef HAVE_SCHED_SETPARAM + PyDoc_STRVAR(posix_sched_getparam__doc__, "sched_getparam(pid) -> sched_param\n\n\ Returns scheduling parameters for the process with *pid* as an instance of the\n\ @@ -4724,6 +4738,10 @@ Py_RETURN_NONE; } +#endif + +#ifdef HAVE_SCHED_RR_GET_INTERVAL + PyDoc_STRVAR(posix_sched_rr_get_interval__doc__, "sched_rr_get_interval(pid) -> float\n\n\ Return the round-robin quantum for the process with PID *pid* in seconds."); @@ -4741,6 +4759,8 @@ return PyFloat_FromDouble((double)interval.tv_sec + 1e-9*interval.tv_nsec); } +#endif + PyDoc_STRVAR(posix_sched_yield__doc__, "sched_yield()\n\n\ Voluntarily relinquish the CPU."); @@ -10054,11 +10074,21 @@ #ifdef HAVE_SCHED_H {"sched_get_priority_max", posix_sched_get_priority_max, METH_VARARGS, posix_sched_get_priority_max__doc__}, {"sched_get_priority_min", posix_sched_get_priority_min, METH_VARARGS, posix_sched_get_priority_min__doc__}, +#ifdef HAVE_SCHED_SETPARAM {"sched_getparam", posix_sched_getparam, METH_VARARGS, posix_sched_getparam__doc__}, +#endif +#ifdef HAVE_SCHED_SETSCHEDULER {"sched_getscheduler", posix_sched_getscheduler, METH_VARARGS, posix_sched_getscheduler__doc__}, +#endif +#ifdef HAVE_SCHED_RR_GET_INTERVAL {"sched_rr_get_interval", posix_sched_rr_get_interval, METH_VARARGS, posix_sched_rr_get_interval__doc__}, +#endif +#ifdef HAVE_SCHED_SETPARAM {"sched_setparam", posix_sched_setparam, METH_VARARGS, 
posix_sched_setparam__doc__}, +#endif +#ifdef HAVE_SCHED_SETSCHEDULER {"sched_setscheduler", posix_sched_setscheduler, METH_VARARGS, posix_sched_setscheduler__doc__}, +#endif {"sched_yield", posix_sched_yield, METH_NOARGS, posix_sched_yield__doc__}, #ifdef HAVE_SCHED_SETAFFINITY {"sched_setaffinity", posix_sched_setaffinity, METH_VARARGS, posix_sched_setaffinity__doc__}, diff --git a/configure b/configure --- a/configure +++ b/configure @@ -9339,7 +9339,7 @@ select sem_open sem_timedwait sem_getvalue sem_unlink sendfile setegid seteuid \ setgid sethostname \ setlocale setregid setreuid setresuid setresgid setsid setpgid setpgrp setpriority setuid setvbuf \ - sched_setaffinity \ + sched_setaffinity sched_setscheduler sched_setparam sched_rr_get_interval \ sigaction sigaltstack siginterrupt sigpending sigrelse \ sigtimedwait sigwait sigwaitinfo snprintf strftime strlcpy symlinkat sync \ sysconf tcgetpgrp tcsetpgrp tempnam timegm times tmpfile tmpnam tmpnam_r \ diff --git a/configure.in b/configure.in --- a/configure.in +++ b/configure.in @@ -2537,7 +2537,7 @@ select sem_open sem_timedwait sem_getvalue sem_unlink sendfile setegid seteuid \ setgid sethostname \ setlocale setregid setreuid setresuid setresgid setsid setpgid setpgrp setpriority setuid setvbuf \ - sched_setaffinity \ + sched_setaffinity sched_setscheduler sched_setparam sched_rr_get_interval \ sigaction sigaltstack siginterrupt sigpending sigrelse \ sigtimedwait sigwait sigwaitinfo snprintf strftime strlcpy symlinkat sync \ sysconf tcgetpgrp tcsetpgrp tempnam timegm times tmpfile tmpnam tmpnam_r \ diff --git a/pyconfig.h.in b/pyconfig.h.in --- a/pyconfig.h.in +++ b/pyconfig.h.in @@ -653,9 +653,18 @@ /* Define to 1 if you have the header file. */ #undef HAVE_SCHED_H +/* Define to 1 if you have the `sched_rr_get_interval' function. */ +#undef HAVE_SCHED_RR_GET_INTERVAL + /* Define to 1 if you have the `sched_setaffinity' function. 
*/ #undef HAVE_SCHED_SETAFFINITY +/* Define to 1 if you have the `sched_setparam' function. */ +#undef HAVE_SCHED_SETPARAM + +/* Define to 1 if you have the `sched_setscheduler' function. */ +#undef HAVE_SCHED_SETSCHEDULER + /* Define to 1 if you have the `select' function. */ #undef HAVE_SELECT -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 01:11:49 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 01:11:49 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_fix_sched_defines?= Message-ID: http://hg.python.org/cpython/rev/a8868c6ede9e changeset: 71709:a8868c6ede9e user: Benjamin Peterson date: Tue Aug 02 18:11:38 2011 -0500 summary: fix sched defines files: Modules/posixmodule.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -10950,7 +10950,7 @@ # endif #endif -#ifdef HAVE_SCHED_H +#if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) sched_param_desc.name = MODNAME ".sched_param"; PyStructSequence_InitType(&SchedParamType, &sched_param_desc); SchedParamType.tp_new = sched_param_new; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 01:22:51 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 01:22:51 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_comparing_on_number_of_cpus?= =?utf8?q?_makes_more_sense?= Message-ID: http://hg.python.org/cpython/rev/ff8565022f18 changeset: 71710:ff8565022f18 user: Benjamin Peterson date: Tue Aug 02 18:22:30 2011 -0500 summary: comparing on number of cpus makes more sense files: Modules/posixmodule.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -4947,7 +4947,7 @@ if (!res) \ return NULL; \ } \ - if 
(Py_TYPE(right) != &cpu_set_type || left->size != right->size) { \ + if (Py_TYPE(right) != &cpu_set_type || left->ncpus != right->ncpus) { \ Py_DECREF(res); \ Py_INCREF(Py_NotImplemented); \ return Py_NotImplemented; \ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 01:34:39 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 01:34:39 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_I_have_been_spoiled_by_chai?= =?utf8?q?ned_comparisons_in_python?= Message-ID: http://hg.python.org/cpython/rev/b1cc72f66ec9 changeset: 71711:b1cc72f66ec9 user: Benjamin Peterson date: Tue Aug 02 18:34:30 2011 -0500 summary: I have been spoiled by chained comparisons in python files: Modules/posixmodule.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -4952,7 +4952,7 @@ Py_INCREF(Py_NotImplemented); \ return Py_NotImplemented; \ } \ - assert(left->size == right->size == res->size); \ + assert(left->size == right->size && right->size == res->size); \ op(res->size, res->set, left->set, right->set); \ return (PyObject *)res; \ } \ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 01:40:55 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 01:40:55 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_only_add_sched=5Fparam_if_i?= =?utf8?q?t_is_initialized?= Message-ID: http://hg.python.org/cpython/rev/0fe26fbca44d changeset: 71712:0fe26fbca44d user: Benjamin Peterson date: Tue Aug 02 18:40:46 2011 -0500 summary: only add sched_param if it is initialized files: Modules/posixmodule.c | 3 +++ 1 files changed, 3 insertions(+), 0 deletions(-) diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -10965,8 +10965,11 @@ Py_INCREF((PyObject*) 
&StatVFSResultType); PyModule_AddObject(m, "statvfs_result", (PyObject*) &StatVFSResultType); + +#if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) Py_INCREF(&SchedParamType); PyModule_AddObject(m, "sched_param", (PyObject *)&SchedParamType); +#endif initialized = 1; #ifdef __APPLE__ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 01:42:24 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 01:42:24 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_avoid_warning_when_SchedPar?= =?utf8?q?amType_is_not_used?= Message-ID: http://hg.python.org/cpython/rev/c39804f08e90 changeset: 71713:c39804f08e90 user: Benjamin Peterson date: Tue Aug 02 18:42:14 2011 -0500 summary: avoid warning when SchedParamType is not used files: Modules/posixmodule.c | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -1609,7 +1609,9 @@ static int initialized; static PyTypeObject StatResultType; static PyTypeObject StatVFSResultType; +#if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) static PyTypeObject SchedParamType; +#endif static newfunc structseq_new; static PyObject * -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 01:45:06 2011 From: python-checkins at python.org (senthil.kumaran) Date: Wed, 03 Aug 2011 01:45:06 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Fix_closes_issu?= =?utf8?q?e12663_-_Correcting_the_ArgumentParser=2Eerror_description=2E_Pa?= =?utf8?q?tch?= Message-ID: http://hg.python.org/cpython/rev/5ff56995976c changeset: 71714:5ff56995976c branch: 3.2 parent: 71697:a70cdec027e7 user: Senthil Kumaran date: Wed Aug 03 07:42:18 2011 +0800 summary: Fix closes issue12663 - Correcting the ArgumentParser.error description. Patch by Sandro Tosi. 
files: Doc/library/argparse.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst --- a/Doc/library/argparse.rst +++ b/Doc/library/argparse.rst @@ -1778,7 +1778,7 @@ .. method:: ArgumentParser.error(message) This method prints a usage message including the *message* to the - standard output and terminates the program with a status code of 2. + standard error and terminates the program with a status code of 2. .. _upgrading-optparse-code: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 01:45:07 2011 From: python-checkins at python.org (senthil.kumaran) Date: Wed, 03 Aug 2011 01:45:07 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Fix_closes_issue12663_-_Correcting_the_ArgumentParser=2Eerro?= =?utf8?q?r_description=2E_Patch?= Message-ID: http://hg.python.org/cpython/rev/a5b3d5051fc7 changeset: 71715:a5b3d5051fc7 parent: 71711:b1cc72f66ec9 parent: 71714:5ff56995976c user: Senthil Kumaran date: Wed Aug 03 07:43:01 2011 +0800 summary: Fix closes issue12663 - Correcting the ArgumentParser.error description. Patch by Sandro Tosi. files: Doc/library/argparse.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst --- a/Doc/library/argparse.rst +++ b/Doc/library/argparse.rst @@ -1797,7 +1797,7 @@ .. method:: ArgumentParser.error(message) This method prints a usage message including the *message* to the - standard output and terminates the program with a status code of 2. + standard error and terminates the program with a status code of 2. .. 
_upgrading-optparse-code: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 01:45:08 2011 From: python-checkins at python.org (senthil.kumaran) Date: Wed, 03 Aug 2011 01:45:08 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Fix_closes_issu?= =?utf8?q?e12663_-_Correcting_the_ArgumentParser=2Eerror_description=2E_Pa?= =?utf8?q?tch?= Message-ID: http://hg.python.org/cpython/rev/1e8dc34ece30 changeset: 71716:1e8dc34ece30 branch: 2.7 parent: 71700:4d39e6f22bef user: Senthil Kumaran date: Wed Aug 03 07:43:52 2011 +0800 summary: Fix closes issue12663 - Correcting the ArgumentParser.error description. Patch by Sandro Tosi. files: Doc/library/argparse.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst --- a/Doc/library/argparse.rst +++ b/Doc/library/argparse.rst @@ -1760,7 +1760,7 @@ .. method:: ArgumentParser.error(message) This method prints a usage message including the *message* to the - standard output and terminates the program with a status code of 2. + standard error and terminates the program with a status code of 2. .. _argparse-from-optparse: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 01:45:09 2011 From: python-checkins at python.org (senthil.kumaran) Date: Wed, 03 Aug 2011 01:45:09 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?b?KTogbWVyZ2UgaGVhZHMu?= Message-ID: http://hg.python.org/cpython/rev/42f791043f08 changeset: 71717:42f791043f08 parent: 71715:a5b3d5051fc7 parent: 71713:c39804f08e90 user: Senthil Kumaran date: Wed Aug 03 07:44:46 2011 +0800 summary: merge heads. 
files: Modules/posixmodule.c | 5 +++++ 1 files changed, 5 insertions(+), 0 deletions(-) diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -1609,7 +1609,9 @@ static int initialized; static PyTypeObject StatResultType; static PyTypeObject StatVFSResultType; +#if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) static PyTypeObject SchedParamType; +#endif static newfunc structseq_new; static PyObject * @@ -10965,8 +10967,11 @@ Py_INCREF((PyObject*) &StatVFSResultType); PyModule_AddObject(m, "statvfs_result", (PyObject*) &StatVFSResultType); + +#if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) Py_INCREF(&SchedParamType); PyModule_AddObject(m, "sched_param", (PyObject *)&SchedParamType); +#endif initialized = 1; #ifdef __APPLE__ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 01:49:50 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 01:49:50 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_bsd_doesn=27t_like_letting_?= =?utf8?q?normal_processes_set_the_scheduler?= Message-ID: http://hg.python.org/cpython/rev/fce751202cba changeset: 71718:fce751202cba parent: 71713:c39804f08e90 user: Benjamin Peterson date: Tue Aug 02 18:48:59 2011 -0500 summary: bsd doesn't like letting normal processes set the scheduler files: Lib/test/test_posix.py | 6 +++++- 1 files changed, 5 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -868,7 +868,11 @@ self.assertRaises(OSError, posix.sched_getparam, -1) param = posix.sched_getparam(0) self.assertIsInstance(param.sched_priority, int) - posix.sched_setscheduler(0, mine, param) + try: + posix.sched_setscheduler(0, mine, param) + except OSError as e: + if e.errno != errno.EPERM: + raise posix.sched_setparam(0, param) self.assertRaises(OSError, 
posix.sched_setparam, -1, param) self.assertRaises(OSError, posix.sched_setscheduler, -1, mine, param) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 01:49:51 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 01:49:51 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_merge_heads?= Message-ID: http://hg.python.org/cpython/rev/de5b5fa674b6 changeset: 71719:de5b5fa674b6 parent: 71718:fce751202cba parent: 71717:42f791043f08 user: Benjamin Peterson date: Tue Aug 02 18:49:38 2011 -0500 summary: merge heads files: Doc/library/argparse.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst --- a/Doc/library/argparse.rst +++ b/Doc/library/argparse.rst @@ -1797,7 +1797,7 @@ .. method:: ArgumentParser.error(message) This method prints a usage message including the *message* to the - standard output and terminates the program with a status code of 2. + standard error and terminates the program with a status code of 2. .. 
_upgrading-optparse-code: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 02:56:20 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 02:56:20 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_need_NULL_sentinel?= Message-ID: http://hg.python.org/cpython/rev/6dda8da2a19a changeset: 71720:6dda8da2a19a user: Benjamin Peterson date: Tue Aug 02 19:56:11 2011 -0500 summary: need NULL sentinel files: Modules/posixmodule.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -4616,7 +4616,7 @@ sched_param_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) { PyObject *res, *priority; - static char *kwlist[] = {"sched_priority"}; + static char *kwlist[] = {"sched_priority", NULL}; if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O:sched_param", kwlist, &priority)) return NULL; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 04:21:41 2011 From: python-checkins at python.org (eli.bendersky) Date: Wed, 03 Aug 2011 04:21:41 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2311049=3A_fix_test?= =?utf8?q?=5Fforget_on_installed_Python_-_add_os=2Ecurdir_to_sys=2Epath?= Message-ID: http://hg.python.org/cpython/rev/07d94cf3521e changeset: 71721:07d94cf3521e user: Eli Bendersky date: Wed Aug 03 05:18:33 2011 +0300 summary: Issue #11049: fix test_forget on installed Python - add os.curdir to sys.path files: Lib/test/test_support.py | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_support.py b/Lib/test/test_support.py --- a/Lib/test/test_support.py +++ b/Lib/test/test_support.py @@ -58,6 +58,7 @@ mod_filename = TESTFN + '.py' with open(mod_filename, 'w') as f: print('foo = 1', file=f) + sys.path.insert(0, os.curdir) try: mod = __import__(TESTFN) self.assertIn(TESTFN, sys.modules) @@ -65,6 +66,7 
@@ support.forget(TESTFN) self.assertNotIn(TESTFN, sys.modules) finally: + del sys.path[0] support.unlink(mod_filename) def test_HOST(self): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 05:11:04 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 05:11:04 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_merge_heads?= Message-ID: http://hg.python.org/cpython/rev/7d83d47cfb2e changeset: 71723:7d83d47cfb2e parent: 71722:4e4e229b4255 parent: 71721:07d94cf3521e user: Benjamin Peterson date: Tue Aug 02 22:09:56 2011 -0500 summary: merge heads files: Lib/test/test_support.py | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_support.py b/Lib/test/test_support.py --- a/Lib/test/test_support.py +++ b/Lib/test/test_support.py @@ -58,6 +58,7 @@ mod_filename = TESTFN + '.py' with open(mod_filename, 'w') as f: print('foo = 1', file=f) + sys.path.insert(0, os.curdir) try: mod = __import__(TESTFN) self.assertIn(TESTFN, sys.modules) @@ -65,6 +66,7 @@ support.forget(TESTFN) self.assertNotIn(TESTFN, sys.modules) finally: + del sys.path[0] support.unlink(mod_filename) def test_HOST(self): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 05:11:04 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 05:11:04 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_OSX_doesn=27t_check_sched?= =?utf8?q?=5Fget=5Fpriority=5F=28min/max=29_argument?= Message-ID: http://hg.python.org/cpython/rev/4e4e229b4255 changeset: 71722:4e4e229b4255 parent: 71720:6dda8da2a19a user: Benjamin Peterson date: Tue Aug 02 22:09:37 2011 -0500 summary: OSX doesn't check sched_get_priority_(min/max) argument files: Lib/test/test_posix.py | 6 ++++-- 1 files changed, 4 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py --- a/Lib/test/test_posix.py 
+++ b/Lib/test/test_posix.py @@ -848,8 +848,10 @@ self.assertIsInstance(lo, int) self.assertIsInstance(hi, int) self.assertGreaterEqual(hi, lo) - self.assertRaises(OSError, posix.sched_get_priority_min, -23) - self.assertRaises(OSError, posix.sched_get_priority_max, -23) + # OSX evidently just returns 15 without checking the argument. + if sys.platform != "darwin": + self.assertRaises(OSError, posix.sched_get_priority_min, -23) + self.assertRaises(OSError, posix.sched_get_priority_max, -23) @unittest.skipUnless(hasattr(posix, 'sched_setscheduler'), "can't change scheduler") def test_get_and_set_scheduler_and_param(self): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 05:11:05 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 05:11:05 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_fix_indentation?= Message-ID: http://hg.python.org/cpython/rev/32b1bc6d1f92 changeset: 71724:32b1bc6d1f92 user: Benjamin Peterson date: Tue Aug 02 22:10:55 2011 -0500 summary: fix indentation files: Lib/test/test_posix.py | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -850,8 +850,8 @@ self.assertGreaterEqual(hi, lo) # OSX evidently just returns 15 without checking the argument. 
if sys.platform != "darwin": - self.assertRaises(OSError, posix.sched_get_priority_min, -23) - self.assertRaises(OSError, posix.sched_get_priority_max, -23) + self.assertRaises(OSError, posix.sched_get_priority_min, -23) + self.assertRaises(OSError, posix.sched_get_priority_max, -23) @unittest.skipUnless(hasattr(posix, 'sched_setscheduler'), "can't change scheduler") def test_get_and_set_scheduler_and_param(self): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 05:15:48 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 05:15:48 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_fix_punctuation?= Message-ID: http://hg.python.org/cpython/rev/d01bf280ac2e changeset: 71725:d01bf280ac2e user: Benjamin Peterson date: Tue Aug 02 22:15:40 2011 -0500 summary: fix punctuation files: Lib/test/test_posix.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -832,7 +832,7 @@ requires_sched_h = unittest.skipUnless(hasattr(posix, 'sched_yield'), "don't have scheduling support") requires_sched_affinity = unittest.skipUnless(hasattr(posix, 'cpu_set'), - "dont' have sched affinity support") + "don't have sched affinity support") @requires_sched_h def test_sched_yield(self): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 05:19:24 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 05:19:24 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_handle_sched=5Frr=5Fget=5Fi?= =?utf8?q?nterval_not_working_on_current?= Message-ID: http://hg.python.org/cpython/rev/4862df5cbedb changeset: 71726:4862df5cbedb user: Benjamin Peterson date: Tue Aug 02 22:19:14 2011 -0500 summary: handle sched_rr_get_interval not working on current files: Lib/test/test_posix.py | 9 ++++++++- 1 files changed, 8 insertions(+), 1 
deletions(-) diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -890,7 +890,14 @@ @unittest.skipUnless(hasattr(posix, "sched_rr_get_interval"), "no function") def test_sched_rr_get_interval(self): - interval = posix.sched_rr_get_interval(0) + try: + interval = posix.sched_rr_get_interval(0) + except OSError as e: + # This likely means that sched_rr_get_interval is only valid for + # processes with the SCHED_RR scheduler in effect. + if e.errno != errno.EINVAL: + raise + self.skipTest("only works on SCHED_RR processes") self.assertIsInstance(interval, float) # Reasonable constraints, I think. self.assertGreaterEqual(interval, 0.) -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Wed Aug 3 05:21:48 2011 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Wed, 03 Aug 2011 05:21:48 +0200 Subject: [Python-checkins] Daily reference leaks (6dda8da2a19a): sum=0 Message-ID: results for 6dda8da2a19a on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogNr9416', '-x'] From python-checkins at python.org Wed Aug 3 08:28:27 2011 From: python-checkins at python.org (georg.brandl) Date: Wed, 03 Aug 2011 08:28:27 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Fix_spacing_in_?= =?utf8?q?string_literal=2E?= Message-ID: http://hg.python.org/cpython/rev/0a9b0712f51a changeset: 71727:0a9b0712f51a branch: 3.2 parent: 71714:5ff56995976c user: Georg Brandl date: Wed Aug 03 08:27:00 2011 +0200 summary: Fix spacing in string literal. 
files: Lib/http/client.py | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Lib/http/client.py b/Lib/http/client.py --- a/Lib/http/client.py +++ b/Lib/http/client.py @@ -777,8 +777,8 @@ for d in data: self.sock.sendall(d) else: - raise TypeError("data should be a bytes-like object\ - or an iterable, got %r " % type(data)) + raise TypeError("data should be a bytes-like object " + "or an iterable, got %r" % type(data)) def _output(self, s): """Add a line of output to the current request buffer. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 08:28:27 2011 From: python-checkins at python.org (georg.brandl) Date: Wed, 03 Aug 2011 08:28:27 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_with_3=2E2=2E?= Message-ID: http://hg.python.org/cpython/rev/597645db3cec changeset: 71728:597645db3cec parent: 71726:4862df5cbedb parent: 71727:0a9b0712f51a user: Georg Brandl date: Wed Aug 03 08:29:12 2011 +0200 summary: Merge with 3.2. files: Lib/http/client.py | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Lib/http/client.py b/Lib/http/client.py --- a/Lib/http/client.py +++ b/Lib/http/client.py @@ -777,8 +777,8 @@ for d in data: self.sock.sendall(d) else: - raise TypeError("data should be a bytes-like object\ - or an iterable, got %r " % type(data)) + raise TypeError("data should be a bytes-like object " + "or an iterable, got %r" % type(data)) def _output(self, s): """Add a line of output to the current request buffer. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 12:47:22 2011 From: python-checkins at python.org (senthil.kumaran) Date: Wed, 03 Aug 2011 12:47:22 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Fix_closes_issu?= =?utf8?q?e12683_-_urljoin_to_work_with_relative_join_of_svn_scheme=2E?= Message-ID: http://hg.python.org/cpython/rev/5278aa2d9d9a changeset: 71729:5278aa2d9d9a branch: 2.7 parent: 71716:1e8dc34ece30 user: Senthil Kumaran date: Wed Aug 03 18:31:59 2011 +0800 summary: Fix closes issue12683 - urljoin to work with relative join of svn scheme. files: Lib/test/test_urlparse.py | 1 + Lib/urlparse.py | 3 ++- Misc/NEWS | 3 +++ 3 files changed, 6 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py --- a/Lib/test/test_urlparse.py +++ b/Lib/test/test_urlparse.py @@ -339,6 +339,7 @@ self.checkJoin('http:///', '..','http:///') self.checkJoin('', 'http://a/b/c/g?y/./x','http://a/b/c/g?y/./x') self.checkJoin('', 'http://a/./g', 'http://a/./g') + self.checkJoin('svn://pathtorepo/dir1','dir2','svn://pathtorepo/dir2') def test_RFC2732(self): for url, hostname, port in [ diff --git a/Lib/urlparse.py b/Lib/urlparse.py --- a/Lib/urlparse.py +++ b/Lib/urlparse.py @@ -34,7 +34,8 @@ # A classification of schemes ('' means apply by default) uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap', 'wais', 'file', 'https', 'shttp', 'mms', - 'prospero', 'rtsp', 'rtspu', '', 'sftp'] + 'prospero', 'rtsp', 'rtspu', '', 'sftp', + 'svn'] uses_netloc = ['ftp', 'http', 'gopher', 'nntp', 'telnet', 'imap', 'wais', 'file', 'mms', 'https', 'shttp', 'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '', diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -37,6 +37,9 @@ Library ------- +- Issue #12683: urlparse updated to include svn as schemes that uses relative + paths. (svn from 1.5 onwards support relative path). 
+ - Issue #11933: Fix incorrect mtime comparison in distutils. - Issues #11104, #8688: Fix the behavior of distutils' sdist command with -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 12:47:23 2011 From: python-checkins at python.org (senthil.kumaran) Date: Wed, 03 Aug 2011 12:47:23 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Fix_closes_issu?= =?utf8?q?e12683_-_urljoin_to_work_with_relative_join_of_svn_scheme=2E?= Message-ID: http://hg.python.org/cpython/rev/57a836eb6916 changeset: 71730:57a836eb6916 branch: 3.2 parent: 71714:5ff56995976c user: Senthil Kumaran date: Wed Aug 03 18:37:22 2011 +0800 summary: Fix closes issue12683 - urljoin to work with relative join of svn scheme. files: Lib/test/test_urlparse.py | 2 ++ Lib/urllib/parse.py | 3 ++- Misc/NEWS | 3 +++ 3 files changed, 7 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py --- a/Lib/test/test_urlparse.py +++ b/Lib/test/test_urlparse.py @@ -371,6 +371,8 @@ self.checkJoin('http:///', '..','http:///') self.checkJoin('', 'http://a/b/c/g?y/./x','http://a/b/c/g?y/./x') self.checkJoin('', 'http://a/./g', 'http://a/./g') + self.checkJoin('svn://pathtorepo/dir1', 'dir2', 'svn://pathtorepo/dir2') + self.checkJoin('svn://pathtorepo/dir1', 'dir2', 'svn://pathtorepo/dir2') def test_RFC2732(self): str_cases = [ diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py --- a/Lib/urllib/parse.py +++ b/Lib/urllib/parse.py @@ -38,7 +38,8 @@ # A classification of schemes ('' means apply by default) uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap', 'wais', 'file', 'https', 'shttp', 'mms', - 'prospero', 'rtsp', 'rtspu', '', 'sftp'] + 'prospero', 'rtsp', 'rtspu', '', 'sftp', + 'svn', 'svn+ssh'] uses_netloc = ['ftp', 'http', 'gopher', 'nntp', 'telnet', 'imap', 'wais', 'file', 'mms', 'https', 'shttp', 'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '', diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ 
b/Misc/NEWS @@ -41,6 +41,9 @@ Library ------- +- Issue #12683: urlparse updated to include svn as schemes that uses relative + paths. (svn from 1.5 onwards support relative path). + - Issues #11104, #8688: Fix the behavior of distutils' sdist command with manually-maintained MANIFEST files. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 12:47:24 2011 From: python-checkins at python.org (senthil.kumaran) Date: Wed, 03 Aug 2011 12:47:24 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_include_svn+ssh?= =?utf8?q?_as_well=2E?= Message-ID: http://hg.python.org/cpython/rev/aa9342093199 changeset: 71731:aa9342093199 branch: 2.7 parent: 71729:5278aa2d9d9a user: Senthil Kumaran date: Wed Aug 03 18:40:18 2011 +0800 summary: include svn+ssh as well. files: Lib/urlparse.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/urlparse.py b/Lib/urlparse.py --- a/Lib/urlparse.py +++ b/Lib/urlparse.py @@ -35,7 +35,7 @@ uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap', 'wais', 'file', 'https', 'shttp', 'mms', 'prospero', 'rtsp', 'rtspu', '', 'sftp', - 'svn'] + 'svn', 'svn+ssh'] uses_netloc = ['ftp', 'http', 'gopher', 'nntp', 'telnet', 'imap', 'wais', 'file', 'mms', 'https', 'shttp', 'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '', -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 12:47:25 2011 From: python-checkins at python.org (senthil.kumaran) Date: Wed, 03 Aug 2011 12:47:25 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_merge_from_3=2E2_-_Fix_closes_issue12683_-_urljoin_to_work_w?= =?utf8?q?ith_relative_join_of?= Message-ID: http://hg.python.org/cpython/rev/a3981d0c4d9b changeset: 71732:a3981d0c4d9b parent: 71719:de5b5fa674b6 parent: 71730:57a836eb6916 user: Senthil Kumaran date: Wed Aug 03 18:44:10 2011 +0800 summary: merge from 3.2 - Fix closes issue12683 - urljoin to work with relative join of 
svn scheme. files: Lib/test/test_urlparse.py | 2 ++ Lib/urllib/parse.py | 3 ++- Misc/NEWS | 3 +++ 3 files changed, 7 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py --- a/Lib/test/test_urlparse.py +++ b/Lib/test/test_urlparse.py @@ -371,6 +371,8 @@ self.checkJoin('http:///', '..','http:///') self.checkJoin('', 'http://a/b/c/g?y/./x','http://a/b/c/g?y/./x') self.checkJoin('', 'http://a/./g', 'http://a/./g') + self.checkJoin('svn://pathtorepo/dir1', 'dir2', 'svn://pathtorepo/dir2') + self.checkJoin('svn://pathtorepo/dir1', 'dir2', 'svn://pathtorepo/dir2') def test_RFC2732(self): str_cases = [ diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py --- a/Lib/urllib/parse.py +++ b/Lib/urllib/parse.py @@ -38,7 +38,8 @@ # A classification of schemes ('' means apply by default) uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap', 'wais', 'file', 'https', 'shttp', 'mms', - 'prospero', 'rtsp', 'rtspu', '', 'sftp'] + 'prospero', 'rtsp', 'rtspu', '', 'sftp', + 'svn', 'svn+ssh'] uses_netloc = ['ftp', 'http', 'gopher', 'nntp', 'telnet', 'imap', 'wais', 'file', 'mms', 'https', 'shttp', 'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '', diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -249,6 +249,9 @@ Library ------- +- Issue #12683: urlparse updated to include svn as schemes that uses relative + paths. (svn from 1.5 onwards support relative path). 
+ - Issue #12655: Expose functions from sched.h in the os module: sched_yield(), sched_setscheduler(), sched_getscheduler(), sched_setparam(), sched_get_min_priority(), sched_get_max_priority(), sched_rr_get_interval(), -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 12:47:26 2011 From: python-checkins at python.org (senthil.kumaran) Date: Wed, 03 Aug 2011 12:47:26 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_merge_heads?= Message-ID: http://hg.python.org/cpython/rev/60353231df0c changeset: 71733:60353231df0c parent: 71732:a3981d0c4d9b parent: 71728:597645db3cec user: Senthil Kumaran date: Wed Aug 03 18:45:02 2011 +0800 summary: merge heads files: Lib/http/client.py | 4 ++-- Lib/test/test_posix.py | 17 +++++++++++++---- Lib/test/test_support.py | 2 ++ Modules/posixmodule.c | 2 +- 4 files changed, 18 insertions(+), 7 deletions(-) diff --git a/Lib/http/client.py b/Lib/http/client.py --- a/Lib/http/client.py +++ b/Lib/http/client.py @@ -777,8 +777,8 @@ for d in data: self.sock.sendall(d) else: - raise TypeError("data should be a bytes-like object\ - or an iterable, got %r " % type(data)) + raise TypeError("data should be a bytes-like object " + "or an iterable, got %r" % type(data)) def _output(self, s): """Add a line of output to the current request buffer. 
diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -832,7 +832,7 @@ requires_sched_h = unittest.skipUnless(hasattr(posix, 'sched_yield'), "don't have scheduling support") requires_sched_affinity = unittest.skipUnless(hasattr(posix, 'cpu_set'), - "dont' have sched affinity support") + "don't have sched affinity support") @requires_sched_h def test_sched_yield(self): @@ -848,8 +848,10 @@ self.assertIsInstance(lo, int) self.assertIsInstance(hi, int) self.assertGreaterEqual(hi, lo) - self.assertRaises(OSError, posix.sched_get_priority_min, -23) - self.assertRaises(OSError, posix.sched_get_priority_max, -23) + # OSX evidently just returns 15 without checking the argument. + if sys.platform != "darwin": + self.assertRaises(OSError, posix.sched_get_priority_min, -23) + self.assertRaises(OSError, posix.sched_get_priority_max, -23) @unittest.skipUnless(hasattr(posix, 'sched_setscheduler'), "can't change scheduler") def test_get_and_set_scheduler_and_param(self): @@ -888,7 +890,14 @@ @unittest.skipUnless(hasattr(posix, "sched_rr_get_interval"), "no function") def test_sched_rr_get_interval(self): - interval = posix.sched_rr_get_interval(0) + try: + interval = posix.sched_rr_get_interval(0) + except OSError as e: + # This likely means that sched_rr_get_interval is only valid for + # processes with the SCHED_RR scheduler in effect. + if e.errno != errno.EINVAL: + raise + self.skipTest("only works on SCHED_RR processes") self.assertIsInstance(interval, float) # Reasonable constraints, I think. self.assertGreaterEqual(interval, 0.) 
diff --git a/Lib/test/test_support.py b/Lib/test/test_support.py --- a/Lib/test/test_support.py +++ b/Lib/test/test_support.py @@ -58,6 +58,7 @@ mod_filename = TESTFN + '.py' with open(mod_filename, 'w') as f: print('foo = 1', file=f) + sys.path.insert(0, os.curdir) try: mod = __import__(TESTFN) self.assertIn(TESTFN, sys.modules) @@ -65,6 +66,7 @@ support.forget(TESTFN) self.assertNotIn(TESTFN, sys.modules) finally: + del sys.path[0] support.unlink(mod_filename) def test_HOST(self): diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -4616,7 +4616,7 @@ sched_param_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) { PyObject *res, *priority; - static char *kwlist[] = {"sched_priority"}; + static char *kwlist[] = {"sched_priority", NULL}; if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O:sched_param", kwlist, &priority)) return NULL; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 12:47:26 2011 From: python-checkins at python.org (senthil.kumaran) Date: Wed, 03 Aug 2011 12:47:26 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAobWVyZ2UgMy4yIC0+IDMuMik6?= =?utf8?q?_merged_heads?= Message-ID: http://hg.python.org/cpython/rev/23e4ed4c8bf9 changeset: 71734:23e4ed4c8bf9 branch: 3.2 parent: 71730:57a836eb6916 parent: 71727:0a9b0712f51a user: Senthil Kumaran date: Wed Aug 03 18:46:24 2011 +0800 summary: merged heads files: Lib/http/client.py | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Lib/http/client.py b/Lib/http/client.py --- a/Lib/http/client.py +++ b/Lib/http/client.py @@ -777,8 +777,8 @@ for d in data: self.sock.sendall(d) else: - raise TypeError("data should be a bytes-like object\ - or an iterable, got %r " % type(data)) + raise TypeError("data should be a bytes-like object " + "or an iterable, got %r" % type(data)) def _output(self, s): """Add a line of output to the current request buffer. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 15:54:54 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 15:54:54 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_fix_arg_strings?= Message-ID: http://hg.python.org/cpython/rev/8e39ae75a639 changeset: 71735:8e39ae75a639 parent: 71726:4862df5cbedb user: Benjamin Peterson date: Wed Aug 03 08:54:26 2011 -0500 summary: fix arg strings files: Modules/posixmodule.c | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -5074,7 +5074,7 @@ pid_t pid; Py_cpu_set *cpu_set; - if (!PyArg_ParseTuple(args, _Py_PARSE_PID "O!|sched_setaffinity", + if (!PyArg_ParseTuple(args, _Py_PARSE_PID "O!:sched_setaffinity", &pid, &cpu_set_type, &cpu_set)) return NULL; if (sched_setaffinity(pid, cpu_set->size, cpu_set->set)) @@ -5094,7 +5094,7 @@ int ncpus; Py_cpu_set *res; - if (!PyArg_ParseTuple(args, _Py_PARSE_PID "i|sched_getaffinity", + if (!PyArg_ParseTuple(args, _Py_PARSE_PID "i:sched_getaffinity", &pid, &ncpus)) return NULL; res = make_new_cpu_set(&cpu_set_type, ncpus); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 15:54:55 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 03 Aug 2011 15:54:55 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_merge_heads?= Message-ID: http://hg.python.org/cpython/rev/e8a53f4643ce changeset: 71736:e8a53f4643ce parent: 71735:8e39ae75a639 parent: 71733:60353231df0c user: Benjamin Peterson date: Wed Aug 03 08:54:44 2011 -0500 summary: merge heads files: Lib/http/client.py | 4 ++-- Lib/test/test_urlparse.py | 2 ++ Lib/urllib/parse.py | 3 ++- Misc/NEWS | 3 +++ 4 files changed, 9 insertions(+), 3 deletions(-) diff --git a/Lib/http/client.py b/Lib/http/client.py --- a/Lib/http/client.py +++ 
b/Lib/http/client.py @@ -777,8 +777,8 @@ for d in data: self.sock.sendall(d) else: - raise TypeError("data should be a bytes-like object\ - or an iterable, got %r " % type(data)) + raise TypeError("data should be a bytes-like object " + "or an iterable, got %r" % type(data)) def _output(self, s): """Add a line of output to the current request buffer. diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py --- a/Lib/test/test_urlparse.py +++ b/Lib/test/test_urlparse.py @@ -371,6 +371,8 @@ self.checkJoin('http:///', '..','http:///') self.checkJoin('', 'http://a/b/c/g?y/./x','http://a/b/c/g?y/./x') self.checkJoin('', 'http://a/./g', 'http://a/./g') + self.checkJoin('svn://pathtorepo/dir1', 'dir2', 'svn://pathtorepo/dir2') + self.checkJoin('svn://pathtorepo/dir1', 'dir2', 'svn://pathtorepo/dir2') def test_RFC2732(self): str_cases = [ diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py --- a/Lib/urllib/parse.py +++ b/Lib/urllib/parse.py @@ -38,7 +38,8 @@ # A classification of schemes ('' means apply by default) uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap', 'wais', 'file', 'https', 'shttp', 'mms', - 'prospero', 'rtsp', 'rtspu', '', 'sftp'] + 'prospero', 'rtsp', 'rtspu', '', 'sftp', + 'svn', 'svn+ssh'] uses_netloc = ['ftp', 'http', 'gopher', 'nntp', 'telnet', 'imap', 'wais', 'file', 'mms', 'https', 'shttp', 'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '', diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -249,6 +249,9 @@ Library ------- +- Issue #12683: urlparse updated to include svn as schemes that uses relative + paths. (svn from 1.5 onwards support relative path). 
+ - Issue #12655: Expose functions from sched.h in the os module: sched_yield(), sched_setscheduler(), sched_getscheduler(), sched_setparam(), sched_get_min_priority(), sched_get_max_priority(), sched_rr_get_interval(), -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 16:10:43 2011 From: python-checkins at python.org (senthil.kumaran) Date: Wed, 03 Aug 2011 16:10:43 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_add_svn+ssh_tes?= =?utf8?q?tcase_for_urljoin=2E?= Message-ID: http://hg.python.org/cpython/rev/5a0726fcb18a changeset: 71737:5a0726fcb18a branch: 2.7 parent: 71731:aa9342093199 user: Senthil Kumaran date: Wed Aug 03 22:06:05 2011 +0800 summary: add svn+ssh testcase for urljoin. files: Lib/test/test_urlparse.py | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py --- a/Lib/test/test_urlparse.py +++ b/Lib/test/test_urlparse.py @@ -340,6 +340,7 @@ self.checkJoin('', 'http://a/b/c/g?y/./x','http://a/b/c/g?y/./x') self.checkJoin('', 'http://a/./g', 'http://a/./g') self.checkJoin('svn://pathtorepo/dir1','dir2','svn://pathtorepo/dir2') + self.checkJoin('svn+ssh://pathtorepo/dir1','dir2','svn+ssh://pathtorepo/dir2') def test_RFC2732(self): for url, hostname, port in [ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 16:10:44 2011 From: python-checkins at python.org (senthil.kumaran) Date: Wed, 03 Aug 2011 16:10:44 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_change_the_redu?= =?utf8?q?ndant_svn_scheme_urljoin_test_case_to_svn+ssh_scheme=2E?= Message-ID: http://hg.python.org/cpython/rev/4957131ad9dd changeset: 71738:4957131ad9dd branch: 3.2 parent: 71734:23e4ed4c8bf9 user: Senthil Kumaran date: Wed Aug 03 22:08:46 2011 +0800 summary: change the redundant svn scheme urljoin test case to svn+ssh scheme. 
files: Lib/test/test_urlparse.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py --- a/Lib/test/test_urlparse.py +++ b/Lib/test/test_urlparse.py @@ -372,7 +372,7 @@ self.checkJoin('', 'http://a/b/c/g?y/./x','http://a/b/c/g?y/./x') self.checkJoin('', 'http://a/./g', 'http://a/./g') self.checkJoin('svn://pathtorepo/dir1', 'dir2', 'svn://pathtorepo/dir2') - self.checkJoin('svn://pathtorepo/dir1', 'dir2', 'svn://pathtorepo/dir2') + self.checkJoin('svn+ssh://pathtorepo/dir1', 'dir2', 'svn+ssh://pathtorepo/dir2') def test_RFC2732(self): str_cases = [ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 3 16:10:45 2011 From: python-checkins at python.org (senthil.kumaran) Date: Wed, 03 Aug 2011 16:10:45 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_merge_from_3=2E2_-_change_the_extra_svn_to_svn+ssh?= Message-ID: http://hg.python.org/cpython/rev/65c412586901 changeset: 71739:65c412586901 parent: 71736:e8a53f4643ce parent: 71738:4957131ad9dd user: Senthil Kumaran date: Wed Aug 03 22:09:51 2011 +0800 summary: merge from 3.2 - change the extra svn to svn+ssh files: Lib/test/test_urlparse.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py --- a/Lib/test/test_urlparse.py +++ b/Lib/test/test_urlparse.py @@ -372,7 +372,7 @@ self.checkJoin('', 'http://a/b/c/g?y/./x','http://a/b/c/g?y/./x') self.checkJoin('', 'http://a/./g', 'http://a/./g') self.checkJoin('svn://pathtorepo/dir1', 'dir2', 'svn://pathtorepo/dir2') - self.checkJoin('svn://pathtorepo/dir1', 'dir2', 'svn://pathtorepo/dir2') + self.checkJoin('svn+ssh://pathtorepo/dir1', 'dir2', 'svn+ssh://pathtorepo/dir2') def test_RFC2732(self): str_cases = [ -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Thu Aug 4 05:25:21 2011 From: solipsis at pitrou.net (solipsis at 
pitrou.net) Date: Thu, 04 Aug 2011 05:25:21 +0200 Subject: [Python-checkins] Daily reference leaks (65c412586901): sum=0 Message-ID: results for 65c412586901 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogso7nu3', '-x'] From python-checkins at python.org Thu Aug 4 15:46:53 2011 From: python-checkins at python.org (ezio.melotti) Date: Thu, 4 Aug 2011 15:46:53 +0200 (CEST) Subject: [Python-checkins] r88872 - in tracker/roundup-src: BUILD.txt CHANGES.txt demo.py doc/FAQ.txt doc/Makefile doc/acknowledgements.txt doc/admin_guide.txt doc/announcement.txt doc/customizing.txt doc/debugging.txt doc/design.txt doc/developers.txt doc/index.txt doc/installation.txt doc/mysql.txt doc/postgresql.txt doc/upgrading.txt doc/user_guide.txt doc/xmlrpc.txt frontends/roundup.cgi locale/de.po locale/it.po roundup/__init__.py roundup/actions.py roundup/admin.py roundup/anypy/cookie_.py roundup/anypy/dbm_.py roundup/anypy/email_.py roundup/anypy/hashlib_.py roundup/anypy/http_.py roundup/anypy/io_.py roundup/anypy/sets_.py roundup/anypy/urllib_.py roundup/backends/__init__.py roundup/backends/back_anydbm.py roundup/backends/back_mysql.py roundup/backends/back_postgresql.py roundup/backends/back_sqlite.py roundup/backends/blobfiles.py roundup/backends/indexer_common.py roundup/backends/indexer_dbm.py roundup/backends/indexer_rdbms.py roundup/backends/indexer_xapian.py roundup/backends/locking.py roundup/backends/portalocker.py roundup/backends/rdbms_common.py roundup/backends/sessions_dbm.py roundup/backends/sessions_rdbms.py roundup/cgi/KeywordsExpr.py roundup/cgi/accept_language.py roundup/cgi/actions.py roundup/cgi/cgitb.py roundup/cgi/client.py roundup/cgi/form_parser.py roundup/cgi/templating.py roundup/cgi/wsgi_handler.py roundup/configuration.py roundup/date.py roundup/dist/command/build.py roundup/dist/command/install_lib.py roundup/hyperdb.py 
roundup/init.py roundup/instance.py roundup/mailer.py roundup/mailgw.py roundup/password.py roundup/roundupdb.py roundup/scripts/roundup_mailgw.py roundup/scripts/roundup_server.py roundup/security.py roundup/xmlrpc.py scripts/imapServer.py setup.py share/roundup/templates/classic/html/_generic.index.html share/roundup/templates/classic/html/_generic.keywords_expr.html share/roundup/templates/classic/html/issue.search.html share/roundup/templates/classic/html/page.html share/roundup/templates/classic/html/style.css share/roundup/templates/classic/schema.py share/roundup/templates/minimal/html/_generic.index.html share/roundup/templates/minimal/html/page.html share/roundup/templates/minimal/html/style.css share/roundup/templates/minimal/schema.py test/db_test_base.py test/memorydb.py test/session_common.py test/test_actions.py test/test_cgi.py test/test_dates.py test/test_indexer.py test/test_mailer.py test/test_mailgw.py test/test_memorydb.py test/test_multipart.py test/test_mysql.py test/test_postgresql.py test/test_security.py test/test_sqlite.py test/test_templating.py test/test_xmlrpc.py Message-ID: <3RVCHK6Vr0zN4C@mail.python.org> Author: ezio.melotti Date: Thu Aug 4 15:46:52 2011 New Revision: 88872 Log: #411: Upgrade to 1.4.19. 
Added: tracker/roundup-src/roundup/anypy/cookie_.py tracker/roundup-src/roundup/anypy/dbm_.py tracker/roundup-src/roundup/anypy/email_.py tracker/roundup-src/roundup/anypy/http_.py tracker/roundup-src/roundup/anypy/io_.py tracker/roundup-src/roundup/anypy/urllib_.py tracker/roundup-src/roundup/cgi/KeywordsExpr.py tracker/roundup-src/roundup/dist/command/install_lib.py tracker/roundup-src/share/roundup/templates/classic/html/_generic.keywords_expr.html tracker/roundup-src/test/memorydb.py tracker/roundup-src/test/test_mailer.py tracker/roundup-src/test/test_memorydb.py Modified: tracker/roundup-src/BUILD.txt tracker/roundup-src/CHANGES.txt tracker/roundup-src/demo.py tracker/roundup-src/doc/FAQ.txt tracker/roundup-src/doc/Makefile tracker/roundup-src/doc/acknowledgements.txt tracker/roundup-src/doc/admin_guide.txt tracker/roundup-src/doc/announcement.txt tracker/roundup-src/doc/customizing.txt tracker/roundup-src/doc/debugging.txt tracker/roundup-src/doc/design.txt tracker/roundup-src/doc/developers.txt tracker/roundup-src/doc/index.txt tracker/roundup-src/doc/installation.txt tracker/roundup-src/doc/mysql.txt tracker/roundup-src/doc/postgresql.txt tracker/roundup-src/doc/upgrading.txt tracker/roundup-src/doc/user_guide.txt tracker/roundup-src/doc/xmlrpc.txt tracker/roundup-src/frontends/roundup.cgi tracker/roundup-src/locale/de.po tracker/roundup-src/locale/it.po tracker/roundup-src/roundup/__init__.py tracker/roundup-src/roundup/actions.py tracker/roundup-src/roundup/admin.py tracker/roundup-src/roundup/anypy/hashlib_.py tracker/roundup-src/roundup/anypy/sets_.py tracker/roundup-src/roundup/backends/__init__.py tracker/roundup-src/roundup/backends/back_anydbm.py tracker/roundup-src/roundup/backends/back_mysql.py tracker/roundup-src/roundup/backends/back_postgresql.py tracker/roundup-src/roundup/backends/back_sqlite.py tracker/roundup-src/roundup/backends/blobfiles.py tracker/roundup-src/roundup/backends/indexer_common.py 
tracker/roundup-src/roundup/backends/indexer_dbm.py tracker/roundup-src/roundup/backends/indexer_rdbms.py tracker/roundup-src/roundup/backends/indexer_xapian.py tracker/roundup-src/roundup/backends/locking.py tracker/roundup-src/roundup/backends/portalocker.py tracker/roundup-src/roundup/backends/rdbms_common.py tracker/roundup-src/roundup/backends/sessions_dbm.py tracker/roundup-src/roundup/backends/sessions_rdbms.py tracker/roundup-src/roundup/cgi/accept_language.py tracker/roundup-src/roundup/cgi/actions.py tracker/roundup-src/roundup/cgi/cgitb.py tracker/roundup-src/roundup/cgi/client.py tracker/roundup-src/roundup/cgi/form_parser.py tracker/roundup-src/roundup/cgi/templating.py tracker/roundup-src/roundup/cgi/wsgi_handler.py tracker/roundup-src/roundup/configuration.py tracker/roundup-src/roundup/date.py tracker/roundup-src/roundup/dist/command/build.py tracker/roundup-src/roundup/hyperdb.py tracker/roundup-src/roundup/init.py tracker/roundup-src/roundup/instance.py tracker/roundup-src/roundup/mailer.py tracker/roundup-src/roundup/mailgw.py tracker/roundup-src/roundup/password.py tracker/roundup-src/roundup/roundupdb.py tracker/roundup-src/roundup/scripts/roundup_mailgw.py tracker/roundup-src/roundup/scripts/roundup_server.py tracker/roundup-src/roundup/security.py tracker/roundup-src/roundup/xmlrpc.py tracker/roundup-src/scripts/imapServer.py tracker/roundup-src/setup.py tracker/roundup-src/share/roundup/templates/classic/html/_generic.index.html tracker/roundup-src/share/roundup/templates/classic/html/issue.search.html tracker/roundup-src/share/roundup/templates/classic/html/page.html tracker/roundup-src/share/roundup/templates/classic/html/style.css tracker/roundup-src/share/roundup/templates/classic/schema.py tracker/roundup-src/share/roundup/templates/minimal/html/_generic.index.html tracker/roundup-src/share/roundup/templates/minimal/html/page.html tracker/roundup-src/share/roundup/templates/minimal/html/style.css 
tracker/roundup-src/share/roundup/templates/minimal/schema.py tracker/roundup-src/test/db_test_base.py tracker/roundup-src/test/session_common.py tracker/roundup-src/test/test_actions.py tracker/roundup-src/test/test_cgi.py tracker/roundup-src/test/test_dates.py tracker/roundup-src/test/test_indexer.py tracker/roundup-src/test/test_mailgw.py tracker/roundup-src/test/test_multipart.py tracker/roundup-src/test/test_mysql.py tracker/roundup-src/test/test_postgresql.py tracker/roundup-src/test/test_security.py tracker/roundup-src/test/test_sqlite.py tracker/roundup-src/test/test_templating.py tracker/roundup-src/test/test_xmlrpc.py Modified: tracker/roundup-src/BUILD.txt ============================================================================== --- tracker/roundup-src/BUILD.txt (original) +++ tracker/roundup-src/BUILD.txt Thu Aug 4 15:46:52 2011 @@ -9,31 +9,33 @@ Building and distributing a release of Roundup is done by running: 1. Make sure the unit tests run! "./run_tests.py" -2. XXX "tag" SVN?? -3. Edit roundup/__init__.py and doc/announcement.txt to reflect the new - version and appropriate announcements. Add truncated announcement to - setup.py description field. +2. Edit roundup/__init__.py and doc/announcement.txt to reflect the new + version and appropriate announcements. +3. Note the SVN revision in the CHANGES.txt file. 4. Clean out all *.orig, *.rej, .#* files from the source. 5. python setup.py clean --all 6. Edit setup.py to ensure that all information therein (version, contact information etc) is correct. -7. python setup.py sdist --manifest-only -8. Check the MANIFEST to make sure that any new files are included. If +7. python setup.py build_doc +8. python setup.py sdist --manifest-only +9. Check the MANIFEST to make sure that any new files are included. If they are not, edit MANIFEST.in to include them. "Documentation" for MANIFEST.in may be found in disutils.filelist._parse_template_line. -9. python setup.py sdist +10. 
python setup.py sdist (if you find sdist a little verbose, add "--quiet" to the end of the command) -10. Unpack the new dist file in /tmp then a) run_test.py and b) demo.py +11. Unpack the new dist file in /tmp then a) run_test.py and b) demo.py with all available Python versions. -11. Generate gpg signature with "gpg -a --detach-sign" -12. python setup.py bdist_rpm -13. python setup.py bdist_wininst -14. Send doc/announcement.txt to python-announce at python.org -15. Notify any other news services as appropriate... +12. Assuming all is well tag the release in SVN:: - http://freshmeat.net/projects/roundup/ + svn cp https://svn.roundup-tracker.org/svnroot/roundup/roundup/trunk \ + https://svn.roundup-tracker.org/svnroot/roundup/roundup/tags/release-1-4-19 +13. python setup.py bdist_rpm +14. python setup.py bdist_wininst +15. Send doc/announcement.txt to python-announce at python.org and + roundup-users at lists.sourceforge.net and + roundup-devel at lists.sourceforge.net So, those commands in a nice, cut'n'pasteable form:: @@ -41,11 +43,17 @@ find . -name '*.rej' -exec rm {} \; find . -name '.#*' -exec rm {} \; python setup.py clean --all + python setup.py build_doc python setup.py sdist --manifest-only python setup.py sdist --quiet python setup.py bdist_rpm python setup.py bdist_wininst python setup.py register - python2.5 setup.py sdist upload --sign + python setup.py sdist upload --sign + python2.5 setup.py bdist_wininst upload --sign +(if the last two fail make sure you're using python2.5+) +Note that python2.6 won't correctly create a bdist_wininst install on +Linux (it will produce a .exe with "linux" in the name). 2.7 still has +this bug (Ralf) Modified: tracker/roundup-src/CHANGES.txt ============================================================================== --- tracker/roundup-src/CHANGES.txt (original) +++ tracker/roundup-src/CHANGES.txt Thu Aug 4 15:46:52 2011 @@ -1,12 +1,365 @@ -This file contains the changes to the Roundup system over time. 
The entries -are given with the most recent entry first. +This file contains the changes to the Roundup system over time. +The entries are given with the most recent entry first. +Each entry has the deveoper who committed the change in brackets. +Entries without name were done by Richard Jones. + +2011-XX-XX 1.4.20 (r4XXX) + +Features: +Fixed: + +issue2550695: 'No sort or group' settings not retained when editing queries. + Reported and fixed by John Kristensen. Tested by Satchidanand Haridas. + (Bernhard) + +2011-07-15 1.4.19 (r4638) + +Features: + +- Xapian indexing improved: Slightly faster and slightly smaller database. + Closes issue2550687. Thanks to Olly Betts for the patch. (Bernhard Reiter) +- PostgreSQL backend minor improvement: database creation less likely to fail + for PostgreSQL versions >= 8.1 as the table "postgres" is used by default. + Closes issue2550543. Thanks to Kai Storbeck for the patch. (Bernhard Reiter) +- Allow HTMLRequest.batch to filter on other permissions than "View" + (e.g. on the new "Search" permission") by adding a "permission" + parameter. Thanks to Eli Collins for the patch. Closes issue2550699. (Ralf) + +Fixed: + +- Installation: Fixed an issue that prevented to use EasyInstall + and a Python egg. Thanks to Satchidanand Haridas for the patch and + John Kristensen for testing it. (Bernhard Reiter) +- The PostgreSQL backend quotes database names now for CREATE and DROP, + enabling more exotic tracker names. Closes issue2550497. + Thanks to Sebastian Harl for providing the patch. (Bernhard Reiter) +- Updated the url to point to www.roundup-tracker.org in two places in the + docs. (Bernhard Reiter) +- Do not depend on a CPython implementation detail anymore to make Roundup + more compatible with other Python implementations like PyPy. + Closes issue2550707. Thanks to Christof Meerwald. (Bernhard Reiter, Richard) +- Yet another fix to the mail gateway, messages got *all* files of + an issue, not just the new ones. 
Thanks to Rafal Bisingier for + reporting and proposing a fix. The regression test was updated. + (Ralf) +- Fix version numbers in upgrade documentation, the file-unlink defect + was in 1.4.17 not 1.4.16. Thanks to Rafal Bisingier. (Ralf) +- Fix encoded email header parsing if multiple encoded and non-encoded + parts are present. RFC2047 specifies that spacing is removed only + between encoded parts, we always removed the space. Note that this bug + was present before mail gateway refactoring :-) Thanks for thorough + testing of mail gateway code by Rafal Bisingier. (Ralf) +- The "Retire" permission was not being registered. (Richard) +- Fix StringIO issue2550713: io.StringIO in newer versions of python + returns unicode strings and expects a unicode string in the + constructor. Unfortunately csv doesn't handle unicode (yet). So we + need to use a BytesIO which gets the utf-8 string from the + web-interface. Compatibility for old versions by using + StringIO.StringIO for emulating a io.BytesIO also works. + Thanks to C?dric Krier for reporting. Closes issue2550713. + Added a regression test for EditCSVAction (Ralf) +- Fix issue2550691 where a Unix From-Header was sometimes inserted in + outgoing emails, thanks to Joseph Myers for the patch. (Ralf) + + +2011-05-29 1.4.18 (r4610) + +Features: + +- Norwegian Bokmal translation by Christian Aastorp (Ralf) +- Allow to specify additional cc and bcc emails (not roundup users) for + nosymessage used by the nosyreaction reactor. (Ralf) + +Fixed: + +- File-unlink defect in mailgw fixed! If an email was received + that contained no attachments, all previous files of the issue were unlinked. + This defect was introduced with the 1.4.17 release as an unwanted result + of the mail gate code refactoring. Thanks to Rafal Bisingier for reporting + and proposing a fix. There is now a regression test in place. (Ralf) + +2011-05-13 1.4.17 (r4605) + +Features: + +- Allow declaration of default_values for properties in schema. 
+- Add explicit "Search" permissions, see Security Fix below. +- Add "lookup" method to xmlrpc interface (Ralf Schlatterbeck) +- Multilinks can be filtered by combining elements with AND, OR and NOT + operators now. A javascript gui was added for "keywords", see issue2550648. + Developed by Sascha Teichmann; funded by Intevation. (Bernhard Reiter) +- Factor MailGW message parsing into a separate class, thanks to John + Kristensen who did the major work in issue2550576 -- I wouldn't + have attempted it without this. Fixes issue2550576. (Ralf) +- Now if the -C option to roundup-mailgw specifies "issue" this refers + to an issue-like class. The real class is determined from the + configured default class, or the -c option to the mailgw, or the class + resulting from mail subject parsing. We also accept multiple -S + options for the same class now. (Ralf) +- Optimisation: Late evaluation of Multilinks (only in rdbms backends): + previously we materialized each multilink in a Node -- this creates an + SQL query for each multilink (e.g. 'files' and 'messages' for each + line in the issue index display) -- even if the multilinks aren't + displayed. Now we compute multilinks only if they're accessed (and + keep them cached). +- Add a filter_iter similar to the existing filter call. This feature is + considered experimental. This is currently not used in the + web-interface but passes all tests for the filter call except sorting + by Multilinks (which isn't supported by SQL and isn't a sane concept + anyway). When using filter_iter instead of filter this saves a *lot* + of SQL queries: Filter returns only the IDs of Nodes in the database, + the additional content of a Node has to be fetched in a separate SQL + call. The new filter_iter also returns the IDs of Nodes (one by one, + it's an iterator) but pre-seeds the cache with the content of the + Node. The information needed for seeding the cache is retrieved in the + same SQL query as the ids. 
+ +Fixed: + +- Security Fix: Add a check for search-permissions: now we allow + searching for properties only if the property is readable without a + check method or if an explicit search permission (see above unter + "Features) is given for the property. This fixes cases where a user + doesn't have access to a property but can deduce the content by + crafting a clever search, group or sort query. + see doc/upgrading.txt for how to fix your trackers! (Ralf Schlatterbeck). +- Range support in roundup-server so large files can be served, + e.g. media files on iOS/iPads; issue2550694. (Bernhard Reiter; + Thanks to Jon C. Thomason for the patch.) +- Fix search for xapian 1.2 issue2550676 + (Bernhard Reiter; Thanks to Olly Betts for providing the patch.) +- Some minor typos fixed in doc/customizing.txt (Thanks Ralf Hemmecke). +- XML-RPC documentation now linked from the docs/index (Bernhard Reiter). +- Fix setting of sys.path when importing schema.py, fixes issue2550675, + thanks to Bryce L Nordgren for reporting. (Ralf Schlatterbeck) +- clear the cache on commit for rdbms backends: Don't carry over cached + values from one transaction to the next (there may be other changes + from other transactions) see new ConcurrentDBTest for a + read-modify-update cycle that fails with the old caching behavior. + (Ralf Schlatterbeck) +- Fix incorrect setting of template in customizing.txt example action, + patch via issue2550682 (thanks John Kristensen) +- Configuration issue: On some postgresql 8.4 installations (notably on + debian squeeze) the default template database used for database + creation doesn't match the needed character encoding UTF8 -- a new + config option 'template' in the rdbms section now allows specification + of the template. 
You know you need this option if you get the error + message: + psycopg2.DataError: new encoding (UTF8) is incompatible with the + encoding of the template database (SQL_ASCII) + HINT: Use the same encoding as in the template database, or use + template0 as template. + (Ralf Schlatterbeck) +- Fixed bug in mailgw refactoring, patch issue2550697 (thanks Hubert + Touvet) +- Fix Password handling security issue2550688 (thanks Joseph Myers for + reporting and Eli Collins for fixing) -- this fixes all observations + by Joseph Myers except for auto-migration of existing passwords. +- Add new config-option 'migrate_passwords' in section 'web' to + auto-migrate passwords at web-login time. Default for the new option + is "yes" so if you don't want that passwords are auto-migrated to a + more secure password scheme on user login, set this to "no" before + running your tracker(s) after the upgrade. +- Add new config-option 'password_pbkdf2_default_rounds' in 'main' + section to configure the default parameter for new password + generation. Set this to a higher value on faster systems which want + more security. Thanks to Eli Collins for implementing this (see + issue2550688). +- Fix documentation for roundup-server about the 'host' parameter as + suggested in issue2550693, fixes the first part of this issue. Make + 'localhost' the new default for this parameter, note the upgrading + documentation of changed behaviour. We also deprecate the empty host + parameter for binding to all interfaces now (still left in for + compatibility). Thanks to Toni Mueller for providing the first version + of this patch and discussing implementations. +- Fixed bug in filter_iter refactoring (lazy multilinks), in rare cases + this would result in duplicate multilinks to the same node. We're now + going the safe route and doing lazy evaluation only for read-only + access, whenever updates are done we fetch everything. 
+ +2010-10-08 1.4.16 (r4541) + +Features: + +- allow trackers to override the classes used to render properties in + templating per issue2550659 (thanks Ezio Melotti) +- new mailgw configuration item "subject_updates_title": If set to "no" + a changed subject in a reply to an issue will not update the issue + title with the changed subject. Thanks to Arkadiusz Kita and Peter + Funk for requesting the feature and discussing the implementation. + http://thread.gmane.org/gmane.comp.bug-tracking.roundup.user/10169 +- new rdbms config item sqlite_timeout makes the previously hard-coded + timeout of 30 seconds configurable. This is the time a client waits + for the locked database to become free before giving up. Used only for + SQLite backend. +- new mailgw config item unpack_rfc822 that unpacks message attachments + of type message/rfc822 and attaches the individual parts instead of + attaching the whole message/rfc822 attachment to the roundup issue. + +Fixed: + +- fixed reporting of source missing warnings +- relevant tests made locale independent, issue2550660 (thanks + Benni B?rmann for reporting). +- fix for incorrect except: syntax, issue2550661 (thanks Jakub Wilk) +- No longer use the root logger, use a logger with prefix "roundup", + see http://thread.gmane.org/gmane.comp.bug-tracking.roundup.devel/5356 +- improve handling of '>' when URLs are converted to links, issue2550664 + (thanks Ezio Melotti) +- fixed registration, issue2550665 (thanks Timo Paulssen) +- make sorting of multilinks in the web interface more robust, issue2550663 +- Fix charset of first text-part of outgoing multipart messages, thanks Dirk + Geschke for reporting, see + http://thread.gmane.org/gmane.comp.bug-tracking.roundup.user/10223 +- Fix handling of incoming message/rfc822 attachments. These resulted in + a weird mail usage error because the email module threw a TypeError + which roundup interprets as a Reject exception. Fixes issue2550667. 
+ Added regression tests for message/rfc822 attachments with and without + configured unpacking (mailgw unpack_rfc822, see Features above) + Thanks to Benni B?rmann for reporting. +- Allow search_popup macro to work with all db classes, issue2550567 + (thanks John Kristensen) +- lower memory footprint for (journal-) import + + +2010-07-12 1.4.15 + +Fixed: + +- A bunch of regressions were introduced in the last release making Roundup + no longer work in Python releases prior to 2.6 +- make URL detection a little smarter about brackets per issue2550657 + (thanks Ezio Melotti) + + +2010-07-01 1.4.14 + +Features: + +- Preparations for getting 2to3 work, not completed yet. (Richard Jones) + +Fixed: + +- User input not escaped when a bad template name is supplied (thanks + Benjamin Pollack) +- The email for the first message on an issue was having its In-Reply-To + set to itself (thanks Eric Kow) +- Handle multiple @action values from broken trackers. +- Accept single-character subject lines +- xmlrpc handling of unicode characters and binary values, see + http://thread.gmane.org/gmane.comp.bug-tracking.roundup.user/10040 + thanks to Hauke Duden for reporting these. +- frontends/roundup.cgi got out of sync with the roundup.cgi.Client API +- Default to "text/plain" if no Content-Type header is present in email + (thanks Hauke Duden) +- Small documentation update regarding debugging aids (Bernhard Reiter) +- Indexer Xapian, made Xapian 1.2 compatible. Needs at least Xapian 1.0.0 now. + (Bernhard Reiter; Thanks to Olly Betts for providing the patch Issue2550647.) + + +2010-02-19 1.4.13 + +Fixed: +- Multilink edit fields lose their values (thanks Will Maier) + + +2010-02-09 1.4.12 (r4455) + +Features: +- Support IMAP CRAM-MD5, thanks Jochen Maes + +Fixes: +- Proper handling of 'Create' permissions in both mail gateway (earlier + commit r4405 by Richard), web interface, and xmlrpc. This used to + check 'Edit' permission previously. 
See + http://thread.gmane.org/gmane.comp.bug-tracking.roundup.devel/5133 + Add regression tests for proper handling of 'Create' and 'Edit' + permissions. +- Fix handling of non-ascii in realname in the nosy mailer, this used to + mangle the email address making it unusable when replying. Thanks to + intevation for funding the fix. +- Fix documentation on user required to run the tests, fixes + issue2550618, thanks to Chris aka 'radioking' +- Add simple doc about translating customised tracker content +- Add "flup" setup documentation, thanks Christian Glass +- Fix "Web Access" permission check to allow serving of static files to + Anonymous again +- Add check for "Web Access" permission in all web templating permission + checks +- Improvements in upgrading documentation, thanks Christian Glass +- Display 'today' in the account user's timezone, thanks David Wolever +- Fix file handle leak in some web interfaces with logging turned on, + fixes issue1675845 +- Attempt to generate more human-readable addresses in email, fixes + issue2550632 +- Allow value to be specified to multilink form element templating, fixes + issue2550613, thanks David Wolever +- Fix thread safety with stdin in roundup-server, fixes issue2550596 + (thanks Werner Hunger) + + +2009-12-21 1.4.11 (r4413) + +Features: +- Generic class editor may now restore retired items (thanks Ralf Hemmecke) + +Fixes: +- Fix security hole allowing user permission escalation (thanks Ralf + Schlatterbeck) +- More SSL fixes. SSL wants the underlying socket non-blocking. So we + don't call socket.setdefaulttimeout in case of SSL. This apparently + never raises a WantReadError from SSL. + This also fixes a case where a WantReadError is raised and apparently + the bytes already read are dropped (seems the WantReadError is really + an error, not just an indication to retry). +- Correct initial- and end-handshakes for SSL +- Update FAQ to mention infinite redirects with pathological settings of + the tracker->web variable. 
Closes issue2537286, thanks to "stuidge" + for reporting. +- Fix some format errors in italian translation file +- Some bugs issue classifiers were causing database lookup errors +- Fix security-problem: If user hasn't permission on a message (notably + files and content properties) and is on the nosy list, the content was + sent via email. We now check that user has permission on the message + content and files properties. Thanks to Intevation for funding this + fix. +- Fix traceback on .../msgN/ url, this requests the file content and for + apache mod_wsgi produced a traceback because the mime type is None for + messages, fixes issue2550586, thanks to Thomas Arendsen Hein for + reporting and to Intevation for funding the fix. +- Handle OPTIONS http request method in wsgi handler, fixes issue2550587. + Thanks to Thomas Arendsen Hein for reporting and to Intevation for + funding the fix. +- Add documentation for migrating to the Register permission and + fix mailgw to use Register permission, fixes issue2550599 +- Fix styling of calendar to make it more usable, fixes issue2550608 +- Fix typo in email section of user guide, fixes issue2550607 +- Fix WSGI response code (thanks Peter P?ml) +- Fix linking of an existing item to a newly created item, e.g. + edit action in web template is name="issue-1 at link@msg" value="msg1" + would trigger a traceback about an unbound variable. + Add new regression test for this case. May be related to (now closed) + issue1177477. Thanks to Intevation for funding the fix. +- Clean up all the places where role processing occurs. This is now in a + central place in hyperdb.Class and is used consistently throughout. + This also means now a template can override the way role processing + occurs (e.g. for elaborate permission schemes). Thanks to intevation + for funding the change. 
+- Fix issue2550606 (german translation bug) "an hour" is only used in + the context "in an hour" or "an hour ago" which translates to german + "in einer Stunde" or "vor einer Stunde". So "an hour" is translated + "einer Stunde" (which sounds wrong at first). Also note that date.py + already has a comment saying "XXX this is internationally broken" -- + but at least there's a workaround for german :-) Thanks to Chris + (radioking) for reporting. + 2009-10-09 1.4.10 (r4374) Fixes: - Minor update of doc/developers.txt to point to the new resources on www.roundup-tracker.org (Bernhard Reiter) -- Small CSS improvements regaring the search box (thanks Thomas Arendsan Hein) +- Small CSS improvements regaring the search box (thanks Thomas Arendsen Hein) (issue 2550589) - Indexers behaviour made more consistent regarding length of indexed words and stopwords (thanks Thomas Arendsen Hein, Bernhard Reiter)(issue 2550584) @@ -16,12 +369,13 @@ for export/import) has a new field size limit starting with python2.5. We now issue a warning during export if the limit is too small and use the csv_field_size configuration during import to set the limit for - the csv module. + the csv module. 
(Ralf Schlatterbeck) - Small fix for CGI-handling of XMLRPC requests for python2.4, this worked only for 2.5 and beyond due to a change in the xmlrpc interface - in python -- Document filter method of xmlrpc interface -- Fix interaction of SSL and XMLRPC, now XMLRPC works with SSL + in python (Ralf Schlatterbeck) +- Document filter method of xmlrpc interface (Ralf Schlatterbeck) +- Fix interaction of SSL and XMLRPC, now XMLRPC works with SSL + (Ralf Schlatterbeck) 2009-08-10 1.4.9 (r4346) Modified: tracker/roundup-src/demo.py ============================================================================== --- tracker/roundup-src/demo.py (original) +++ tracker/roundup-src/demo.py Thu Aug 4 15:46:52 2011 @@ -115,7 +115,7 @@ run demo on a server host, please stop the demo, open file "demo/config.ini" with your editor, change the host name in the "web" option in section "[tracker]", save the file, then re-run the demo -program. +program. If you want to change backend types, you must use "nuke". ''' % url Modified: tracker/roundup-src/doc/FAQ.txt ============================================================================== --- tracker/roundup-src/doc/FAQ.txt (original) +++ tracker/roundup-src/doc/FAQ.txt Thu Aug 4 15:46:52 2011 @@ -190,6 +190,12 @@ Make sure that the ``tracker`` -> ``web`` setting in your tracker's config.ini is set to the URL of the tracker. +I'm getting infinite redirects in the browser +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +A wrong value for the ``tracker`` -> ``web`` setting may also result in +infinite redirects, see http://issues.roundup-tracker.org/issue2537286 + How is sorting performed, and why does it seem to fail sometimes? 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Modified: tracker/roundup-src/doc/Makefile ============================================================================== --- tracker/roundup-src/doc/Makefile (original) +++ tracker/roundup-src/doc/Makefile Thu Aug 4 15:46:52 2011 @@ -1,4 +1,4 @@ -STXTOHTML = rst2html +STXTOHTML = rst2html.py STXTOHT = rst2ht.py WEBDIR = ../../htdocs/htdocs/doc-1.0 Modified: tracker/roundup-src/doc/acknowledgements.txt ============================================================================== --- tracker/roundup-src/doc/acknowledgements.txt (original) +++ tracker/roundup-src/doc/acknowledgements.txt Thu Aug 4 15:46:52 2011 @@ -6,12 +6,16 @@ Thanks also to the many people on the mailing list, in the sourceforge project and those who just report bugs: +Christian Aastorp Thomas Arendsen Hein, Nerijus Baliunas, +Benni B??rmann, Anthony Baxter, Marlon van den Berg, Bo Berglund, +Olly Betts, St??phane Bidoul, +Rafal Bisingier, Cameron Blackwood, Jeff Blaine, Duncan Booth, @@ -21,6 +25,7 @@ Steve Byan, Brett Cannon, Godefroid Chapelle, +Eli Collins, Roch'e Compaan, Wil Cooley, Joe Cooper, @@ -29,6 +34,7 @@ Bradley Dean, Toby Dickenson, Paul F. Dubois, +Hauke Duden, Eric Earnst, Peter Eisentraut, Andrew Eland, @@ -40,8 +46,10 @@ Stuart D. Gathman, Martin Geisler, Ajit George, +Dirk Geschke, Frank Gibbons, Johannes Gijsbers, +Christian Glass, Gus Gollings, Philipp Gortan, Dan Grassi, @@ -51,11 +59,15 @@ Engelbert Gruber, Bruce Guenter, Tam??s Gul??csi, +Satchidanand Haridas, +Sebastian Harl, +Ralf Hemmecke, Juergen Hermann, Tobias Herp, Uwe Hoffmann, Alex Holkner, Tobias Hunger, +Werner Hunger, Simon Hyde, Paul Jimenez, Christophe Kalt, @@ -66,29 +78,38 @@ Michael Klatt, Bastian Kleineidam, Axel Kollmorgen, -Cedric Krier, +C??dric Krier, +John Kristensen, Detlef Lannert, Andrey Lebedev, Henrik Levkowetz, David Linke, Martin v. 
Löwis, Fredrik Lundh, +Jochen Maes, Will Maier, Ksenia Marasanova, Georges Martin, Gordon McMillan, +Christof Meerwald, John F Meinel Jr, Roland Meister, +Ezio Melotti, Ulrik Mikaelsson, John Mitchell, Ramiro Morales, Toni Mueller, +Joseph Myers, Stefan Niederhauser, Truls E. Næss, +Bryce L Nordgren, Patrick Ohly, Luke Opperman, Eddie Parker, Will Partain, +Timo Paulssen, +Benjamin Pollack, +Peter Pöml, Ewout Prangsma, Marcus Priesch, Bernhard Reiter, @@ -114,10 +135,12 @@ Nathaniel Smith, Leonardo Soto, Maciej Starzyk, +Kai Storbeck, Mitchell Surface, Anatoly T., Jon C. Thomason Mike Thompson, +Hubert Touvet, Michael Twomey, Joseph E. Trent, Karl Ulbrich, @@ -125,8 +148,10 @@ Darryl VanDorp, J Vickroy, Timothy J. Warren, +Jakub Wilk, William (Wilk), Tue Wennerberg, Matt Wilbert, Chris Withers, +David Wolever, Milan Zamazal. Modified: tracker/roundup-src/doc/admin_guide.txt ============================================================================== --- tracker/roundup-src/doc/admin_guide.txt (original) +++ tracker/roundup-src/doc/admin_guide.txt Thu Aug 4 15:46:52 2011 @@ -74,7 +74,7 @@ [main] port = 8080 - ;hostname = + ;host = ;user = ;group = ;log_ip = yes @@ -93,9 +93,13 @@ **port** Defines the local TCP port to listen for clients on. -**hostname** - Defines the local hostname to listen for clients on. Only required if - "localhost" is not sufficient. +**host** + Defines the hostname or IP number to listen for clients on. Only + required if `localhost` is not sufficient. If left empty (as opposed + to no `host` keyword in the config-file) this will listen to all + network interfaces and is equivalent to an explicit address `0.0.0.0`. + The use of an empty string to listen to all interfaces is deprecated + and will go away in a future version. **user** and **group** Defines the Unix user and group to run the server as. Only work if the server is started as root.
Modified: tracker/roundup-src/doc/announcement.txt ============================================================================== --- tracker/roundup-src/doc/announcement.txt (original) +++ tracker/roundup-src/doc/announcement.txt Thu Aug 4 15:46:52 2011 @@ -1,28 +1,57 @@ -I'm proud to release version 1.4.10 of Roundup which fixes some bugs: +I'm proud to release version 1.4.19 of Roundup which introduces some +minor features and, as usual, fixes some bugs: -- Minor update of doc/developers.txt to point to the new resources - on www.roundup-tracker.org (Bernhard Reiter) -- Small CSS improvements regaring the search box (thanks Thomas Arendsan Hein) - (issue 2550589) -- Indexers behaviour made more consistent regarding length of indexed words - and stopwords (thanks Thomas Arendsen Hein, Bernhard Reiter)(issue 2550584) -- fixed typos in the installation instructions (thanks Thomas Arendsen Hein) - (issue 2550573) -- New config option csv_field_size: Pythons csv module (which is used - for export/import) has a new field size limit starting with python2.5. - We now issue a warning during export if the limit is too small and use - the csv_field_size configuration during import to set the limit for - the csv module. -- Small fix for CGI-handling of XMLRPC requests for python2.4, this - worked only for 2.5 and beyond due to a change in the xmlrpc interface - in python -- Document filter method of xmlrpc interface -- Fix interaction of SSL and XMLRPC, now XMLRPC works with SSL +Features: + +- Xapian indexing improved: Slightly faster and slightly smaller database. + Closes issue2550687. Thanks to Olly Betts for the patch. (Bernhard Reiter) +- PostgreSQL backend minor improvement: database creation less likely to fail + for PostgreSQL versions >= 8.1 as the table "postgres" is used by default. + Closes issue2550543. Thanks to Kai Storbeck for the patch. (Bernhard Reiter) +- Allow HTMLRequest.batch to filter on other permissions than "View" + (e.g. 
on the new "Search" permission) by adding a "permission" + parameter. Thanks to Eli Collins for the patch. Closes issue2550699. (Ralf) + +Fixed: + +- Installation: Fixed an issue that prevented the use of EasyInstall + and a Python egg. Thanks to Satchidanand Haridas for the patch and + John Kristensen for testing it. (Bernhard Reiter) +- The PostgreSQL backend quotes database names now for CREATE and DROP, + enabling more exotic tracker names. Closes issue2550497. + Thanks to Sebastian Harl for providing the patch. (Bernhard Reiter) +- Updated the url to point to www.roundup-tracker.org in two places in the + docs. (Bernhard Reiter) +- Do not depend on a CPython implementation detail anymore to make Roundup + more compatible with other Python implementations like PyPy. + Closes issue2550707. Thanks to Christof Meerwald. (Bernhard Reiter, Richard) +- Yet another fix to the mail gateway, messages got *all* files of + an issue, not just the new ones. Thanks to Rafal Bisingier for + reporting and proposing a fix. The regression test was updated. + (Ralf) +- Fix version numbers in upgrade documentation, the file-unlink defect + was in 1.4.17 not 1.4.16. Thanks to Rafal Bisingier. (Ralf) +- Fix encoded email header parsing if multiple encoded and non-encoded + parts are present. RFC2047 specifies that spacing is removed only + between encoded parts, we always removed the space. Note that this bug + was present before mail gateway refactoring :-) Thanks for thorough + testing of mail gateway code by Rafal Bisingier. (Ralf) +- The "Retire" permission was not being registered. (Richard) +- Fix StringIO issue2550713: io.StringIO in newer versions of python + returns unicode strings and expects a unicode string in the + constructor. Unfortunately csv doesn't handle unicode (yet). So we + need to use a BytesIO which gets the utf-8 string from the + web-interface. Compatibility for old versions by using + StringIO.StringIO for emulating an io.BytesIO also works.
+ Thanks to Cedric Krier for reporting. Closes issue2550713. + Added a regression test for EditCSVAction (Ralf) +- Fix issue2550691 where a Unix From-Header was sometimes inserted in + outgoing emails, thanks to Joseph Myers for the patch. (Ralf) If you're upgrading from an older version of Roundup you *must* follow the "Software Upgrade" guidelines given in the maintenance documentation. -Roundup requires python 2.3 or later (but not 3+) for correct operation. +Roundup requires python 2.4 or later (but not 3+) for correct operation. To give Roundup a try, just download (see below), unpack and run:: @@ -31,7 +60,7 @@ Release info and download page: http://cheeseshop.python.org/pypi/roundup Source and documentation is available at the website: - http://roundup.sourceforge.net/ + http://roundup-tracker.org/ Mailing lists - the place to ask questions: http://sourceforge.net/mail/?group_id=31577 @@ -56,7 +85,7 @@ The system will facilitate communication among the participants by managing discussions and notifying interested parties when issues are edited. One of the major design goals for Roundup that it be simple to get going. Roundup -is therefore usable "out of the box" with any python 2.3+ (but not 3+) +is therefore usable "out of the box" with any python 2.4+ (but not 3+) installation. It doesn't even need to be "installed" to be operational, though an install script is provided. Modified: tracker/roundup-src/doc/customizing.txt ============================================================================== --- tracker/roundup-src/doc/customizing.txt (original) +++ tracker/roundup-src/doc/customizing.txt Thu Aug 4 15:46:52 2011 @@ -362,6 +362,11 @@ an issue for the interval after the issue's creation or last activity. The interval is a standard Roundup interval. + subject_updates_title -- ``yes`` + Update issue title if incoming subject of email is different. + Setting this to ``no`` will ignore the title part of + the subject of incoming email messages. 
+ refwd_re -- ``(\s*\W?\s*(fw|fwd|re|aw|sv|ang)\W)+`` Regular expression matching a single reply or forward prefix prepended by the mailer. This is explicitly stripped from the @@ -384,6 +389,13 @@ Regular expression matching a blank line. Value is Python Regular Expression (UTF8-encoded). + ignore_alternatives -- ``no`` + When parsing incoming mails, roundup uses the first + text/plain part it finds. If this part is inside a + multipart/alternative, and this option is set, all other + parts of the multipart/alternative are ignored. The default + is to keep all parts and attach them to the issue. + Section **pgp** OpenPGP mail processing options @@ -496,7 +508,7 @@ them. **init** This function is responsible for setting up the initial state of your - tracker. It's called exactly once - but the ``roundup-admin initialise`` + tracker. It's called exactly once - by the ``roundup-admin initialise`` command. See the start of the section on `database content`_ for more info about how this works. @@ -958,6 +970,7 @@ - Create (everything) - Edit (everything) - View (everything) +- Register (User class only) These are assigned to the "Admin" Role by default, and allow a user to do anything. Every Class you define in your `tracker schema`_ also gets an @@ -995,7 +1008,7 @@ And the "Anonymous" Role is defined as: - Web interface access -- Create user (for registration) +- Register user (for registration) - View issue, file, msg, query, keyword, priority, status Put together, these settings appear in the tracker's ``schema.py`` file:: @@ -1166,7 +1179,7 @@ - they're *anonymous*. **automatic registration of users in the e-mail gateway** - By giving the "anonymous" user the ("Create", "user") Permission, any + By giving the "anonymous" user the ("Register", "user") Permission, any unidentified user will automatically be registered with the tracker (with no password, so they won't be able to log in through the web until an admin sets their password). 
By default new Roundup @@ -1610,7 +1623,7 @@ **user.register.html** a special page just for the user class, that renders the registration page -**style.css.html** +**style.css** a static file that is served up as-is The *classic* template has a number of additional templates. @@ -1971,7 +1984,7 @@ This is implemented by the ``roundup.cgi.templating.HTMLClass`` class. -This wrapper object provides access to a hyperb class. It is used +This wrapper object provides access to a hyperdb class. It is used primarily in both index view and new item views, but it's also usable anywhere else that you wish to access information about a class, or the items of a class, when you don't have a specific item of that class in @@ -2071,7 +2084,7 @@ This is implemented by the ``roundup.cgi.templating.HTMLItem`` class. -This wrapper object provides access to a hyperb item. +This wrapper object provides access to a hyperdb item. We allow access to properties. There will be no "id" property. The value accessed through the property will be the current value of the same name @@ -2547,6 +2560,15 @@ the "keyword" class (well, their "name" anyway). +Translations +~~~~~~~~~~~~ + +Should you wish to enable multiple languages in template content that you +create you'll need to add new locale files in the tracker home under a +``locale`` directory. Use the instructions in the ``developer's guide`` to +create the locale files. + + Displaying Properties --------------------- @@ -4605,6 +4627,22 @@ db.security.addPermissionToRole('User', 'Create', cl) +Moderating user registration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You could set up new-user moderation in a public tracker by: + +1. creating a new highly-restricted user role "Pending", +2. set the config new_web_user_roles and/or new_email_user_roles to that + role, +3. have an auditor that emails you when new users are created with that + role using roundup.mailer +4. edit the role to "User" for valid users. 
+ +Some simple javascript might help in the last step. If you have high volume +you could search for all currently-Pending users and do a bulk edit of all +their roles at once (again probably with some simple javascript help). + Changes to the Web User Interface --------------------------------- @@ -4831,10 +4869,10 @@ ''' category = self.form['category'].value if category == '-1': - self.error_message.append('You must select a category of report') + self.client.error_message.append('You must select a category of report') return # everything's ok, move on to the next page - self.template = 'add_page2' + self.client.template = 'add_page2' def init(instance): instance.registerAction('page1_submit', Page1SubmitAction) @@ -4862,3 +4900,4 @@ .. _`design documentation`: design.html +.. _`developer's guide`: developers.html Modified: tracker/roundup-src/doc/debugging.txt ============================================================================== --- tracker/roundup-src/doc/debugging.txt (original) +++ tracker/roundup-src/doc/debugging.txt Thu Aug 4 15:46:52 2011 @@ -1,31 +1,23 @@ -Debugging Flags ---------------- +Debugging Aids +-------------- -Roundup uses a number of debugging environment variables to help you -figure out what the heck it's doing. +Try turning on logging of DEBUG level messages. This may be done a number +of ways, depending on what it is you're testing: -HYPERDBDEBUG -============ +1. If you're testing the database unit tests, then set the environment + variable ``LOGGING_LEVEL=DEBUG``. This may be done like so: -This environment variable should be set to a filename - the hyperdb will -write debugging information for various events (including, for instance, -the SQL used). + LOGGING_LEVEL=DEBUG python run_tests.py -This is only obeyed when python is _not_ running in -O mode. + This variable replaces the older HYPERDBDEBUG environment var. 
-HYPERDBTRACE -============ - -This environment variable should be set to a filename - the hyperdb will -write a timestamp entry for various events. This appears to be suffering -rather extreme bit-rot and may go away soon. - -This is only obeyed when python is _not_ running in -O mode. +2. If you're testing a particular tracker, then set the logging level in + your tracker's ``config.ini``. SENDMAILDEBUG ============= -Set to a filename and roundup will write a copy of each email message -that it sends to that file. This environment variable is independent of -the python -O flag. +Set to a filename and roundup will write each email message +that it sends to that file instead of to the internet. +This environment variable is independent of the python -O flag. Modified: tracker/roundup-src/doc/design.txt ============================================================================== --- tracker/roundup-src/doc/design.txt (original) +++ tracker/roundup-src/doc/design.txt Thu Aug 4 15:46:52 2011 @@ -1008,7 +1008,7 @@ Command Interface Specification ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -A single command, roundup, provides basic access to the hyperdatabase +A single command, ``roundup-admin``, provides basic access to the hyperdatabase from the command line:: roundup-admin help @@ -1039,11 +1039,12 @@ are both accepted; an empty string, a single item, or a list of items joined by commas is accepted. -When multiple items are specified to the roundup get or roundup set +When multiple items are specified to the roundup-admin get or roundup-admin set commands, the specified properties are retrieved or set on all the listed items. -When multiple results are returned by the roundup get or roundup find +When multiple results are returned by the roundup-admin get or +roundup-admin find commands, they are printed one per line (default) or joined by commas (with the -list) option.
@@ -1055,8 +1056,8 @@ "spam", for example, you could execute the following command from the directory where the database dumps its files:: - shell% for issue in `roundup find issue status=in-progress`; do - > grep -l spam `roundup get $issue messages` + shell% for issue in `roundup-admin find issue status=in-progress`; do + > grep -l spam `roundup-admin get $issue messages` > done msg23 msg49 @@ -1066,8 +1067,8 @@ Or, using the -list option, this can be written as a single command:: - shell% grep -l spam `roundup get \ - \`roundup find -list issue status=in-progress\` messages` + shell% grep -l spam `roundup-admin get \ + \`roundup-admin find -list issue status=in-progress\` messages` msg23 msg49 msg50 @@ -1156,7 +1157,7 @@ The e-mail interface also provides a simple way to set properties on issues. At the end of the subject line, ``propname=value`` pairs can be specified in square brackets, using the same conventions as for the -roundup ``set`` shell command. +roundup-admin ``set`` shell command. Web User Interface Modified: tracker/roundup-src/doc/developers.txt ============================================================================== --- tracker/roundup-src/doc/developers.txt (original) +++ tracker/roundup-src/doc/developers.txt Thu Aug 4 15:46:52 2011 @@ -22,14 +22,36 @@ - The issue tracker running at http://issues.roundup-tracker.org/ -Website, wiki, issue tracker ----------------------------- +Website, wiki +------------- -1. Log into ,roundup at shell.sourceforge.net +1. ssh -t ,roundup at shell.sourceforge.net create 2. cd /home/groups/r/ro/roundup 3. follow instructions in README.txt +Issue Tracker +------------- + +The tracker resides on psf.upfronthosting.co.za. The roundup installation +belongs to the user roundup. In ~roundup, all trackers are stored and +the roundup code itself. roundup is started through /etc/init.d/roundup; +other parts of the installation are started through +/etc/init.d/{postgresql-8-1,spambayes,postfix}. 
+ +The machine is operated by Upfronthosting in South Africa. The meta +tracker is http://psf.upfronthosting.co.za/roundup/meta/ +In this tracker, Upfronthosting people are the users izak and roche. + +The Roundup tracker http://issues.roundup-tracker.org/ is in +~roundup/trackers/roundup + +The configuration is in the "web/trunk/issues" section of Roundup's +Subversion repository and copied manually to the live tracker. + +A checkout of the roundup sources is in ~roundup/src/roundup-src. + + Small Changes ------------- @@ -75,19 +97,7 @@ Debugging Aids -------------- -Try turning on logging of DEBUG level messages. This may be done a number -of ways, depending on what it is you're testing: - -1. If you're testing the database unit tests, then set the environment - variable ``LOGGING_LEVEL=DEBUG``. This may be done like so: - - LOGGING_LEVEL=DEBUG python run_tests.py - - This variable replaces the older HYPERDBDEBUG environment var. - -2. If you're testing a particular tracker, then set the logging level in - your tracker's ``config.ini``. - +See `debugging.txt`_. Internationalization Notes -------------------------- Modified: tracker/roundup-src/doc/index.txt ============================================================================== --- tracker/roundup-src/doc/index.txt (original) +++ tracker/roundup-src/doc/index.txt Thu Aug 4 15:46:52 2011 @@ -15,6 +15,7 @@ user_guide customizing admin_guide + xmlrpc spec original design developers Modified: tracker/roundup-src/doc/installation.txt ============================================================================== --- tracker/roundup-src/doc/installation.txt (original) +++ tracker/roundup-src/doc/installation.txt Thu Aug 4 15:46:52 2011 @@ -69,9 +69,7 @@ installed and used. You will need to run the "roundup-admin reindex" command if the tracker has existing data. - Roundup requires Xapian *newer* than 0.9.2 - it may be necessary for - you to install a snapshot. 
Snapshot "0.9.2_svn6532" has been tried - successfully. + Roundup requires Xapian 1.0.0 or newer. pyopenssl If pyopenssl_ is installed the roundup-server can be configured @@ -85,7 +83,7 @@ configured, you can require email to be cryptographically signed before roundup will allow it to make modifications to issues. -.. _Xapian: http://www.xapian.org/ +.. _Xapian: http://xapian.org/ .. _pytz: http://www.python.org/pypi/pytz .. _Olson tz database: http://www.twinsun.com/tz/tz-link.htm .. _pyopenssl: http://pyopenssl.sourceforge.net @@ -101,7 +99,7 @@ and if it runs you may skip the `Basic Installation Steps`_ below and go straight to `configuring your first tracker`_. -Download the latest version from http://roundup.sf.net/. +Download the latest version from http://www.roundup-tracker.org/. If you're using WinZIP's "classic" interface, make sure the "Use folder names" check box is checked before you extract the files. @@ -234,7 +232,7 @@ Confirm: Note: running this command will *destroy any existing data in the - database*. In the case of MySQL and PostgreSQL, any exsting database + database*. In the case of MySQL and PostgreSQL, any existing database will be dropped and re-created. Once this is done, the tracker has been created. @@ -326,10 +324,11 @@ There are five web interfaces to choose from: 1. `web server cgi-bin`_ -2. `stand-alone web server`_ -3. `Zope product - ZRoundup`_ -4. `Apache HTTP Server with mod_python`_ -5. `WSGI handler`_ +2. `cgi-bin for limited-access hosting`_ +3. `stand-alone web server`_ +4. `Zope product - ZRoundup`_ +5. `Apache HTTP Server with mod_python`_ +6. `WSGI handler`_ You may need to give the web server user permission to access the tracker home - see the `UNIX environment steps`_ for information. 
You may also need to @@ -387,6 +386,48 @@ +CGI-bin for Limited-Access Hosting +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If you are running in a shared-hosting environment or otherwise don't have +permission to edit the system web server's configuration, but can create a +``.htaccess`` file then you may be able to use this approach. + +1. Install flup_ +2. Create a script ``roundup_stub`` in your server's ``cgi-bin`` directory + containing:: + + #!/usr/bin/env python + + # if necessary modify the Python path to include the place you + # installed Roundup + #import sys + #sys.path.append('...') + + # cgitb is needed for debugging in browser only + #import cgitb + #cgitb.enable() + + # obtain the WSGI request dispatcher + from roundup.cgi.wsgi_handler import RequestDispatcher + tracker_home = '/path/to/tracker/home' + app = RequestDispatcher(tracker_home) + + from flup.server.cgi import WSGIServer + WSGIServer(app).run() + +3. Modify or create the ``.htaccess`` file in the desired (sub-)domain + directory to contain:: + + RewriteEngine On + RewriteBase / + RewriteRule ^(.*)$ /cgi-bin/roundup_stub/$1 [L] + +Now loading the (sub-)domain in a browser should load the tracker web +interface. If you get a "500" error then enable the "cgitb" lines in the +stub to get some debugging information. + + Stand-alone Web Server ~~~~~~~~~~~~~~~~~~~~~~ @@ -998,14 +1039,23 @@ - users of the Windows installer, other binary distributions or pre-installed Roundup will need to download the source to use it. + Remember to have a database user 'rounduptest' prepared (with + password 'rounduptest'). This user + must have at least the rights to create and drop databases. + Documentation: details on `adding MySQL users`_, + for PostgreSQL you want to call the ``createuser`` command with the + ``-d`` option to allow database creation. + Once you've unpacked roundup's source, run ``python run_tests.py`` in the source directory and make sure there are no errors.
If there are errors, please let us know! If the above fails, you may be using the wrong version of python. Try -``python2 run_tests.py``. If that works, you will need to substitute -``python2`` for ``python`` in all further commands you use in relation to -Roundup -- from installation and scripts. +``python2 run_tests.py`` or ``python2.X run_tests.py`` where ``X`` is in +the set 3,4,5,6 depending on the version(s) of python installed. +If that works, you will need to substitute ``python2`` or ``python2.X`` +for ``python`` in all further commands you use in relation to Roundup -- +from installation and scripts. .. _`table of contents`: index.html @@ -1025,7 +1075,10 @@ .. _External hyperlink targets: .. _apache: http://httpd.apache.org/ +.. _flup: http://pypi.python.org/pypi/flup .. _mod_python: http://www.modpython.org/ .. _MySQLdb: http://sourceforge.net/projects/mysql-python .. _Psycopg: http://initd.org/software/initd/psycopg .. _pysqlite: http://pysqlite.org/ +.. _`adding MySQL users`: + http://dev.mysql.com/doc/refman/5.1/en/adding-users.html Modified: tracker/roundup-src/doc/mysql.txt ============================================================================== --- tracker/roundup-src/doc/mysql.txt (original) +++ tracker/roundup-src/doc/mysql.txt Thu Aug 4 15:46:52 2011 @@ -37,8 +37,8 @@ FLUSH PRIVILEGES; 2. If your administrator has provided you with database connection info, - you can modify MYSQL_* constants in the file test/test_db.py with - the correct values. + see the config values in 'test/db_test_base.py' + about which database connection, name and user will be used. The MySQL database should not contain any tables. Tests will not drop the database with existing data. 
Modified: tracker/roundup-src/doc/postgresql.txt ============================================================================== --- tracker/roundup-src/doc/postgresql.txt (original) +++ tracker/roundup-src/doc/postgresql.txt Thu Aug 4 15:46:52 2011 @@ -19,20 +19,14 @@ It is recommended that you use at least version 1.1.21 -Some advice on setting up the postgresql backend may be found at: - - http://www.magma.com.ni/wiki/index.cgi?TipsRoundupPostgres - Running the PostgreSQL unit tests ================================= The user that you're running the tests as will need to be able to access the postgresql database on the local machine and create and drop -databases. Edit the ``test/test_postgresql.py`` database connection info if -you wish to test against a different database. - -The test database will be called "rounduptest". +databases. See the config values in 'test/db_test_base.py' +about which database connection, name and user will be used. Credit Modified: tracker/roundup-src/doc/upgrading.txt ============================================================================== --- tracker/roundup-src/doc/upgrading.txt (original) +++ tracker/roundup-src/doc/upgrading.txt Thu Aug 4 15:46:52 2011 @@ -13,6 +13,212 @@ .. contents:: +Migrating from 1.4.17 to 1.4.18 +=============================== + +There was a bug in 1.4.17 where files were unlinked from issues if a +mail without attachment was received via the mail interface. The +following script will list likely issues being affected by the bug. +The date in the script is the date of the 1.4.17 release. If you have +installed 1.4.17 later than this date, you can change the date +appropriately to your installation date. Run the script in the directory +of your tracker. 
+ +#!/usr/bin/python +import os +from roundup import instance +from roundup.date import Date +dir = os.getcwd () +tracker = instance.open (dir) +db = tracker.open ('admin') +# you may want to change this to your install date to find less candidates +last_release = Date('2011-05-13') +affected = {} +for i in db.issue.getnodeids(): + for j in db.issue.history(i): + if i in affected: + break + if j[1] < last_release or j[3] != 'set' or 'files' not in j[4]: + continue + for op, p in j[4]['files']: + if op == '-': + affected [i] = 1 + break +print ', '.join(sorted(affected.iterkeys())) + +To find out which files where attached before you can look in the +history of the affected issue. For fixing issues you can re-attach the +files in question using the "set" command of roundup-admin, e.g., if the +list of files attached to an issue should be files 5, 17, 23 for issue42 +you will set this using + +roundup-admin -i /path/to/your/tracker set issue42 files=5,17,23 + +Migrating from 1.4.x to 1.4.17 +============================== + +There is a new config-option `migrate_passwords` in section `web` to +auto-migrate passwords at web-login time to a more secure storage +scheme. Default for the new option is "yes" so if you don't want that +passwords are auto-migrated to a more secure password scheme on user +login, set this to "no" before running your tracker(s) after the +upgrade. + +The standalone roundup-server now defaults to listening on localhost (no +longer on all network interfaces). This will not affect you if you're +already using a configuration file for roundup-server. If you are using +an empty setting for the `host` parameter in the config-file you should +explicitly put 0.0.0.0 there as the use of an empty string to specify +listening to all interfaces is deprecated and will go away in a future +version. 
If you are starting the server without a configuration file +and want to explicitly listen to all network interface, you should +specify the -n option with the address `0.0.0.0`. + +Searching now requires either read-permission without a check method, or +you will have to add a "Search" permission for a class or a list of +properties for a class (if you want to allow searching). For the classic +template (or other templates derived from it) you want to add the +following lines to your `schema.py` file:: + + p = db.security.addPermission(name='Search', klass='query') + db.security.addPermissionToRole('User', p) + +This is needed, because for the `query` class users may view only their +own queries (or public queries). This is implemented with a `check` +method, therefore the default search permissions will not allow +searching and you'll have to add an explicit search permission. +If you have modified your schema, you can check if you're missing any +search permissions with the following script, run it in your tracker +directory, it will list for each Class and Property the roles that may +search for this property:: + + #!/usr/bin/python + import os + from roundup import instance + + tracker = instance.open(os.getcwd ()) + db = tracker.open('admin') + + for cl in sorted(db.getclasses()): + print "Class:", cl + for p in sorted(db.getclass(cl).properties.keys()): + print " Property:", p + roles = [] + for role in sorted(db.security.role.iterkeys()): + if db.security.roleHasSearchPermission(cl,p,role): + roles.append(role) + print " roles may search:", ', '.join(roles) + + +Migrating from 1.4.x to 1.4.12 +============================== + +Item creation now checks the "Create" permission instead of the "Edit" +permission for individual properties. If you have modified your tracker +permissions from the default distribution, you should check that +"Create" permissions exist for all properties you want users to be able +to create. 
+ + +Fixing some potential security holes +------------------------------------ + +Enhanced checking was added to the user registration auditor. If you +run a public tracker you should update your tracker's +``detectors/userauditor.py`` using the new code from +``share/roundup/templates/classic/detectors/userauditor.py``. In most +cases you may just copy the file over, but if you've made changes to +the auditor in your tracker then you'll need to manually integrate +the new code. + +Some HTML templates were found to have formatting security problems: + +``html/page.html``:: + + -tal:replace="request/user/username">username
+ +tal:replace="python:request.user.username.plain(escape=1)">username
+ +``html/_generic.help-list.html``:: + + -tal:content="structure python:item[prop]"> + +tal:content="python:item[prop]"> + +The lines marked "+" should be added and lines marked "-" should be +deleted (minus the "+"/"-" signs). + + +Some HTML interface tweaks +-------------------------- + +You may wish to copy the ``user_utils.js`` and ``style.css` files from the +source distribution ``share/roundup/templates/classic/html/`` directory to the +``html`` directory of your trackers as it includes a small improvement. + +If you have made local changes to those files you'll need to manually work +the differences in to your versions or ignore the changes. + + +Migrating from 1.4.x to 1.4.11 +============================== + +Close potential security hole +----------------------------- + +If your tracker has untrusted users you should examine its ``schema.py`` +file and look for the section granting the "Edit" permission to your users. +This should look something like:: + + p = db.security.addPermission(name='Edit', klass='user', check=own_record, + description="User is allowed to edit their own user details") + +and should be modified to restrict the list of properties they are allowed +to edit by adding the ``properties=`` section like:: + + p = db.security.addPermission(name='Edit', klass='user', check=own_record, + properties=('username', 'password', 'address', 'realname', 'phone', + 'organisation', 'alternate_addresses', 'queries', 'timezone'), + description="User is allowed to edit their own user details") + +Most importantly the "roles" property should not be editable - thus not +appear in that list of properties. + + +Grant the "Register" permission to the Anonymous role +----------------------------------------------------- + +A separate "Register" permission has been introduced to allow +anonymous users to register. 
This means you will need to add the +following to your tracker's ``schema.py`` to add the permission and +assign it to the Anonymous role (replacing any previously assigned +"Create user" permission for the Anonymous role):: + + +db.security.addPermission(name='Register', klass='user', + + description='User is allowed to register new user') + + # Assign the appropriate permissions to the anonymous user's Anonymous + # Role. Choices here are: + # - Allow anonymous users to register + -db.security.addPermissionToRole('Anonymous', 'Create', 'user') + +db.security.addPermissionToRole('Anonymous', 'Register', 'user') + +The lines marked "+" should be added and lines marked "-" should be +deleted (minus the "+"/"-" signs). + +You should also modify the ``html/page.html`` template to change the +permission tested there:: + + -tal:condition="python:request.user.hasPermission('Create', 'user')" + +tal:condition="python:request.user.hasPermission('Register', 'user')" + + +Generic class editor may now restore retired items +-------------------------------------------------- + +The instructions for doing so won't be present in your tracker unless you copy +the ``_generic.index.html`` template from the roundup distribution in +``share/roundup/templates/classic/html`` to your tracker's ``html`` directory. 
+ + Migrating from 1.4.x to 1.4.9 ============================= @@ -106,7 +312,7 @@ Fix the "retire" link in the users list for admin users ------------------------------------------------------- -The "retire" link found in the file ``html/users.index.html``:: +The "retire" link found in the file ``html/user.index.html``:: >> roundup_server.filter('user',[],{'username':'adm'}) [] + >>> roundup_server.lookup('user','admin') + '1' Modified: tracker/roundup-src/frontends/roundup.cgi ============================================================================== --- tracker/roundup-src/frontends/roundup.cgi (original) +++ tracker/roundup-src/frontends/roundup.cgi Thu Aug 4 15:46:52 2011 @@ -120,6 +120,7 @@ '''Used to make the CGI server look like a BaseHTTPRequestHandler ''' def __init__(self, wfile): + self.rfile = sys.stdin self.wfile = wfile def write(self, data): self.wfile.write(data) Modified: tracker/roundup-src/locale/de.po ============================================================================== --- tracker/roundup-src/locale/de.po (original) +++ tracker/roundup-src/locale/de.po Thu Aug 4 15:46:52 2011 @@ -1788,7 +1788,7 @@ #: ../roundup/date.py:861 msgid "an hour" -msgstr "eine Stunde" +msgstr "einer Stunde" #: ../roundup/date.py:863 msgid "1 1/2 hours" Modified: tracker/roundup-src/locale/it.po ============================================================================== --- tracker/roundup-src/locale/it.po (original) +++ tracker/roundup-src/locale/it.po Thu Aug 4 15:46:52 2011 @@ -5,7 +5,6 @@ # # roundup.pot revision 1.22 # -#, fuzzy msgid "" msgstr "" "Project-Id-Version: roundup cvs\n" @@ -798,7 +797,7 @@ #: ../roundup/cgi/actions.py:58 #, python-format msgid "You do not have permission to %(action)s the %(classname)s class." -msgstr "Non hai i permessi per %{action) la classe %(classname)." +msgstr "Non hai i permessi per %(action)s la classe %(classname)s." 
#: ../roundup/cgi/actions.py:89 msgid "No type specified" @@ -811,7 +810,7 @@ #: ../roundup/cgi/actions.py:97 #, python-format msgid "\"%(input)s\" is not an ID (%(classname)s ID required)" -msgstr "\"%(input)\" non ?? un ID (%(ID della %(classname) ?? obbligatorio" +msgstr "\"%(input)s\" non ?? un ID (ID della %(classname)s ?? obbligatorio)" #: ../roundup/cgi/actions.py:117 msgid "You may not retire the admin or anonymous user" @@ -835,7 +834,7 @@ #: ../roundup/cgi/actions.py:298 #, python-format msgid "Not enough values on line %(line)s" -msgstr "Non abbastanza valori alla riga %(line)" +msgstr "Non abbastanza valori alla riga %(line)s" #: ../roundup/cgi/actions.py:345 msgid "Items edited OK" @@ -859,12 +858,12 @@ #: ../roundup/cgi/actions.py:452 #, python-format msgid "You do not have permission to edit %(class)s" -msgstr "Non hai i permessi per modificare i $(class)s" +msgstr "Non hai i permessi per modificare i %(class)s" #: ../roundup/cgi/actions.py:464 #, python-format msgid "You do not have permission to create %(class)s" -msgstr "Non hai il permesso per creare $(class)s" +msgstr "Non hai il permesso per creare %(class)s" #: ../roundup/cgi/actions.py:488 msgid "You do not have permission to edit user roles" @@ -978,7 +977,7 @@ #: ../roundup/cgi/cgitb.py:76 #, python-format msgid "A problem occurred in your template \"%s\"." -msgstr "?? occorso un problema nel tuo template" +msgstr "?? occorso un problema nel tuo template \"%s\"." 
#: ../roundup/cgi/cgitb.py:84 #, python-format @@ -1067,7 +1066,7 @@ #: ../roundup/cgi/client.py:758 #, python-format msgid "%(starttag)sTime elapsed: %(seconds)fs%(endtag)s\n" -msgstr "%(starttag)sTempo trascorso: %(seconds)fs%(endtad)s\n" +msgstr "%(starttag)sTempo trascorso: %(seconds)fs%(endtag)s\n" #: ../roundup/cgi/client.py:762 #, python-format @@ -1298,7 +1297,7 @@ msgid "%(number)s year" msgid_plural "%(number)s years" msgstr[0] "%(number)s anno" -msgstr[1] "%(numeber)s anni" +msgstr[1] "%(number)s anni" #: ../roundup/date.py:822 #, python-format @@ -1852,13 +1851,13 @@ #: ../roundup/scripts/roundup_server.py:347 #, python-format msgid "User %(user)s doesn't exist" -msgstr "L'utente $(user)s non esiste" +msgstr "L'utente %(user)s non esiste" #: ../roundup/scripts/roundup_server.py:481 #, python-format msgid "Multiprocess mode \"%s\" is not available, switching to single-process" msgstr "" -"La modalit?? multiprocesso non ?? disponibile, viene utilizzata quella a " +"La modalit?? multiprocesso \"%s\" non ?? 
disponibile, viene utilizzata quella a " "singolo processo" #: ../roundup/scripts/roundup_server.py:504 Modified: tracker/roundup-src/roundup/__init__.py ============================================================================== --- tracker/roundup-src/roundup/__init__.py (original) +++ tracker/roundup-src/roundup/__init__.py Thu Aug 4 15:46:52 2011 @@ -68,6 +68,6 @@ ''' __docformat__ = 'restructuredtext' -__version__ = '1.4.10' +__version__ = '1.4.19' # vim: set filetype=python ts=4 sw=4 et si Modified: tracker/roundup-src/roundup/actions.py ============================================================================== --- tracker/roundup-src/roundup/actions.py (original) +++ tracker/roundup-src/roundup/actions.py Thu Aug 4 15:46:52 2011 @@ -49,8 +49,8 @@ # make sure we don't try to retire admin or anonymous if (classname == 'user' and self.db.user.get(itemid, 'username') in ('admin', 'anonymous')): - raise ValueError, self._( - 'You may not retire the admin or anonymous user') + raise ValueError(self._( + 'You may not retire the admin or anonymous user')) # do the retire self.db.getclass(classname).retire(itemid) Modified: tracker/roundup-src/roundup/admin.py ============================================================================== --- tracker/roundup-src/roundup/admin.py (original) +++ tracker/roundup-src/roundup/admin.py Thu Aug 4 15:46:52 2011 @@ -21,7 +21,7 @@ """ __docformat__ = 'restructuredtext' -import csv, getopt, getpass, os, re, shutil, sys, UserDict +import csv, getopt, getpass, os, re, shutil, sys, UserDict, operator from roundup import date, hyperdb, roundupdb, init, password, token from roundup import __version__ as roundup_version @@ -37,16 +37,15 @@ """ _marker = [] def get(self, key, default=_marker): - if self.data.has_key(key): + if key in self.data: return [(key, self.data[key])] - keylist = self.data.keys() - keylist.sort() + keylist = sorted(self.data) l = [] for ki in keylist: if ki.startswith(key): l.append((ki, self.data[ki])) 
if not l and default is self._marker: - raise KeyError, key + raise KeyError(key) return l class AdminTool: @@ -63,11 +62,11 @@ """ def __init__(self): self.commands = CommandDict() - for k in AdminTool.__dict__.keys(): + for k in AdminTool.__dict__: if k[:3] == 'do_': self.commands[k[3:]] = getattr(self, k) self.help = {} - for k in AdminTool.__dict__.keys(): + for k in AdminTool.__dict__: if k[:5] == 'help_': self.help[k[5:]] = getattr(self, k) self.tracker_home = '' @@ -80,7 +79,7 @@ try: return self.db.getclass(classname) except KeyError: - raise UsageError, _('no such class "%(classname)s"')%locals() + raise UsageError(_('no such class "%(classname)s"')%locals()) def props_from_args(self, args): """ Produce a dictionary of prop: value from the args list. @@ -90,12 +89,12 @@ props = {} for arg in args: if arg.find('=') == -1: - raise UsageError, _('argument "%(arg)s" not propname=value' - )%locals() + raise UsageError(_('argument "%(arg)s" not propname=value' + )%locals()) l = arg.split('=') if len(l) < 2: - raise UsageError, _('argument "%(arg)s" not propname=value' - )%locals() + raise UsageError(_('argument "%(arg)s" not propname=value' + )%locals()) key, value = l[0], '='.join(l[1:]) if value: props[key] = value @@ -137,7 +136,7 @@ """ print _('Commands:'), commands = [''] - for command in self.commands.values(): + for command in self.commands.itervalues(): h = _(command.__doc__).split('\n')[0] commands.append(' '+h[7:]) commands.sort() @@ -150,10 +149,8 @@ def help_commands_html(self, indent_re=re.compile(r'^(\s+)\S+')): """ Produce an HTML command list. 
""" - commands = self.commands.values() - def sortfun(a, b): - return cmp(a.__name__, b.__name__) - commands.sort(sortfun) + commands = sorted(self.commands.itervalues(), + operator.attrgetter('__name__')) for command in commands: h = _(command.__doc__).split('\n') name = command.__name__[3:] @@ -255,7 +252,7 @@ # try help_ methods - if self.help.has_key(topic): + if topic in self.help: self.help[topic]() return 0 @@ -340,7 +337,7 @@ def help_initopts(self): templates = self.listTemplates() - print _('Templates:'), ', '.join(templates.keys()) + print _('Templates:'), ', '.join(templates) import roundup.backends backends = roundup.backends.list_backends() print _('Back ends:'), ', '.join(backends) @@ -369,19 +366,19 @@ See also initopts help. """ if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) # make sure the tracker home can be created tracker_home = os.path.abspath(tracker_home) parent = os.path.split(tracker_home)[0] if not os.path.exists(parent): - raise UsageError, _('Instance home parent directory "%(parent)s"' - ' does not exist')%locals() + raise UsageError(_('Instance home parent directory "%(parent)s"' + ' does not exist')%locals()) config_ini_file = os.path.join(tracker_home, CoreConfig.INI_FILE) # check for both old- and new-style configs - if filter(os.path.exists, [config_ini_file, - os.path.join(tracker_home, 'config.py')]): + if list(filter(os.path.exists, [config_ini_file, + os.path.join(tracker_home, 'config.py')])): ok = raw_input(_( """WARNING: There appears to be a tracker in "%(tracker_home)s"! If you re-install it, you will lose all the data! 
@@ -395,9 +392,9 @@ # select template templates = self.listTemplates() template = len(args) > 1 and args[1] or '' - if not templates.has_key(template): - print _('Templates:'), ', '.join(templates.keys()) - while not templates.has_key(template): + if template not in templates: + print _('Templates:'), ', '.join(templates) + while template not in templates: template = raw_input(_('Select template [classic]: ')).strip() if not template: template = 'classic' @@ -439,8 +436,8 @@ need_set = CoreConfig(tracker_home)._get_unset_options() if need_set: print _(" ... at a minimum, you must set following options:") - for section, options in need_set.items(): - print " [%s]: %s" % (section, ", ".join(options)) + for section in need_set: + print " [%s]: %s" % (section, ", ".join(need_set[section])) # note about schema modifications print _(""" @@ -466,7 +463,7 @@ in . """ if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) config = CoreConfig() config.save(args[0]) @@ -490,11 +487,11 @@ # make sure the tracker home is installed if not os.path.exists(tracker_home): - raise UsageError, _('Instance home does not exist')%locals() + raise UsageError(_('Instance home does not exist')%locals()) try: tracker = roundup.instance.open(tracker_home) except roundup.instance.TrackerError: - raise UsageError, _('Instance has not been installed')%locals() + raise UsageError(_('Instance has not been installed')%locals()) # is there already a database? if tracker.exists(): @@ -511,10 +508,10 @@ tracker.nuke() # re-write the backend select file - init.write_select_db(tracker_home, backend) + init.write_select_db(tracker_home, backend, tracker.config.DATABASE) # GO - tracker.init(password.Password(adminpw)) + tracker.init(password.Password(adminpw, config=tracker.config)) return 0 @@ -523,11 +520,14 @@ ''"""Usage: get property designator[,designator]* Get the given property of one or more designator(s). 
+ A designator is a classname and a nodeid concatenated, + eg. bug1, user10, ... + Retrieves the property value of the nodes specified by the designators. """ if len(args) < 2: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) propname = args[0] designators = args[1].split(',') l = [] @@ -536,7 +536,7 @@ try: classname, nodeid = hyperdb.splitDesignator(designator) except hyperdb.DesignatorError, message: - raise UsageError, message + raise UsageError(message) # get the class cl = self.get_class(classname) @@ -560,7 +560,9 @@ property = properties[propname] if not (isinstance(property, hyperdb.Multilink) or isinstance(property, hyperdb.Link)): - raise UsageError, _('property %s is not of type Multilink or Link so -d flag does not apply.')%propname + raise UsageError(_('property %s is not of type' + ' Multilink or Link so -d flag does not ' + 'apply.')%propname) propclassname = self.db.getclass(property.classname).classname id = cl.get(nodeid, propname) for i in id: @@ -575,7 +577,9 @@ property = properties[propname] if not (isinstance(property, hyperdb.Multilink) or isinstance(property, hyperdb.Link)): - raise UsageError, _('property %s is not of type Multilink or Link so -d flag does not apply.')%propname + raise UsageError(_('property %s is not of type' + ' Multilink or Link so -d flag does not ' + 'apply.')%propname) propclassname = self.db.getclass(property.classname).classname id = cl.get(nodeid, propname) for i in id: @@ -583,10 +587,11 @@ else: print cl.get(nodeid, propname) except IndexError: - raise UsageError, _('no such %(classname)s node "%(nodeid)s"')%locals() + raise UsageError(_('no such %(classname)s node ' + '"%(nodeid)s"')%locals()) except KeyError: - raise UsageError, _('no such %(classname)s property ' - '"%(propname)s"')%locals() + raise UsageError(_('no such %(classname)s property ' + '"%(propname)s"')%locals()) if self.separator: print self.separator.join(l) @@ -600,13 +605,16 @@ The 
items are specified as a class or as a comma-separated list of item designators (ie "designator[,designator,...]"). + A designator is a classname and a nodeid concatenated, + eg. bug1, user10, ... + This command sets the properties to the values for all designators given. If the value is missing (ie. "property=") then the property is un-set. If the property is a multilink, you specify the linked ids for the multilink as comma-separated numbers (ie "1,2,3"). """ if len(args) < 2: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) from roundup import hyperdb designators = args[0].split(',') @@ -622,7 +630,7 @@ try: designators = [hyperdb.splitDesignator(x) for x in designators] except hyperdb.DesignatorError, message: - raise UsageError, message + raise UsageError(message) # get the props from the args props = self.props_from_args(args[1:]) @@ -637,14 +645,14 @@ props[key] = hyperdb.rawToHyperdb(self.db, cl, itemid, key, value) except hyperdb.HyperdbValueError, message: - raise UsageError, message + raise UsageError(message) # try the set try: - apply(cl.set, (itemid, ), props) + cl.set(itemid, **props) except (TypeError, IndexError, ValueError), message: import traceback; traceback.print_exc() - raise UsageError, message + raise UsageError(message) self.db_uncommitted = True return 0 @@ -657,7 +665,7 @@ value. 
""" if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) classname = args[0] # get the class cl = self.get_class(classname) @@ -666,7 +674,7 @@ props = self.props_from_args(args[1:]) # convert the user-input value to a value used for find() - for propname, value in props.items(): + for propname, value in props.iteritems(): if ',' in value: values = value.split(',') else: @@ -686,26 +694,26 @@ designator = [] if self.separator: if self.print_designator: - id=apply(cl.find, (), props) + id = cl.find(**props) for i in id: designator.append(classname + i) print self.separator.join(designator) else: - print self.separator.join(apply(cl.find, (), props)) + print self.separator.join(cl.find(**props)) else: if self.print_designator: - id=apply(cl.find, (), props) + id = cl.find(**props) for i in id: designator.append(classname + i) print designator else: - print apply(cl.find, (), props) + print cl.find(**props) except KeyError: - raise UsageError, _('%(classname)s has no property ' - '"%(propname)s"')%locals() + raise UsageError(_('%(classname)s has no property ' + '"%(propname)s"')%locals()) except (ValueError, TypeError), message: - raise UsageError, message + raise UsageError(message) return 0 def do_specification(self, args): @@ -715,14 +723,15 @@ This lists the properties for a given class. """ if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) classname = args[0] # get the class cl = self.get_class(classname) # get the key property keyprop = cl.getkey() - for key, value in cl.properties.items(): + for key in cl.properties: + value = cl.properties[key] if keyprop == key: print _('%(key)s: %(value)s (key property)')%locals() else: @@ -732,25 +741,27 @@ ''"""Usage: display designator[,designator]* Show the property values for the given node(s). + A designator is a classname and a nodeid concatenated, + eg. 
bug1, user10, ... + This lists the properties and their associated values for the given node. """ if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) # decode the node designator for designator in args[0].split(','): try: classname, nodeid = hyperdb.splitDesignator(designator) except hyperdb.DesignatorError, message: - raise UsageError, message + raise UsageError(message) # get the class cl = self.get_class(classname) # display the values - keys = cl.properties.keys() - keys.sort() + keys = sorted(cl.properties) for key in keys: value = cl.get(nodeid, key) print _('%(key)s: %(value)s')%locals() @@ -764,7 +775,7 @@ command. """ if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) from roundup import hyperdb classname = args[0] @@ -777,8 +788,9 @@ properties = cl.getprops(protected = 0) if len(args) == 1: # ask for the properties - for key, value in properties.items(): + for key in properties: if key == 'id': continue + value = properties[key] name = value.__class__.__name__ if isinstance(value , hyperdb.Password): again = None @@ -799,24 +811,24 @@ props = self.props_from_args(args[1:]) # convert types - for propname, value in props.items(): + for propname in props: try: props[propname] = hyperdb.rawToHyperdb(self.db, cl, None, - propname, value) + propname, props[propname]) except hyperdb.HyperdbValueError, message: - raise UsageError, message + raise UsageError(message) # check for the key property propname = cl.getkey() - if propname and not props.has_key(propname): - raise UsageError, _('you must provide the "%(propname)s" ' - 'property.')%locals() + if propname and propname not in props: + raise UsageError(_('you must provide the "%(propname)s" ' + 'property.')%locals()) # do the actual create try: - print apply(cl.create, (), props) + print cl.create(**props) except (TypeError, IndexError, ValueError), message: 
- raise UsageError, message + raise UsageError(message) self.db_uncommitted = True return 0 @@ -834,9 +846,9 @@ for every class instance. """ if len(args) > 2: - raise UsageError, _('Too many arguments supplied') + raise UsageError(_('Too many arguments supplied')) if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) classname = args[0] # get the class @@ -856,8 +868,8 @@ try: proplist.append(cl.get(nodeid, propname)) except KeyError: - raise UsageError, _('%(classname)s has no property ' - '"%(propname)s"')%locals() + raise UsageError(_('%(classname)s has no property ' + '"%(propname)s"')%locals()) print self.separator.join(proplist) else: # create a list of index id's since user didn't specify @@ -868,8 +880,8 @@ try: value = cl.get(nodeid, propname) except KeyError: - raise UsageError, _('%(classname)s has no property ' - '"%(propname)s"')%locals() + raise UsageError(_('%(classname)s has no property ' + '"%(propname)s"')%locals()) print _('%(nodeid)4s: %(value)s')%locals() return 0 @@ -903,7 +915,7 @@ will result in a the 4 character wide "Name" column. """ if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) classname = args[0] # get the class @@ -918,14 +930,15 @@ try: propname, width = spec.split(':') except (ValueError, TypeError): - raise UsageError, _('"%(spec)s" not name:width')%locals() + raise UsageError(_('"%(spec)s" not ' + 'name:width')%locals()) else: propname = spec - if not all_props.has_key(propname): - raise UsageError, _('%(classname)s has no property ' - '"%(propname)s"')%locals() + if propname not in all_props: + raise UsageError(_('%(classname)s has no property ' + '"%(propname)s"')%locals()) else: - prop_names = cl.getprops().keys() + prop_names = cl.getprops() # now figure column widths props = [] @@ -971,21 +984,25 @@ ''"""Usage: history designator Show the history entries of a designator. 
+ A designator is a classname and a nodeid concatenated, + eg. bug1, user10, ... + Lists the journal entries for the node identified by the designator. """ if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) try: classname, nodeid = hyperdb.splitDesignator(args[0]) except hyperdb.DesignatorError, message: - raise UsageError, message + raise UsageError(message) try: print self.db.getclass(classname).history(nodeid) except KeyError: - raise UsageError, _('no such class "%(classname)s"')%locals() + raise UsageError(_('no such class "%(classname)s"')%locals()) except IndexError: - raise UsageError, _('no such %(classname)s node "%(nodeid)s"')%locals() + raise UsageError(_('no such %(classname)s node ' + '"%(nodeid)s"')%locals()) return 0 def do_commit(self, args): @@ -1020,23 +1037,27 @@ ''"""Usage: retire designator[,designator]* Retire the node specified by designator. + A designator is a classname and a nodeid concatenated, + eg. bug1, user10, ... + This action indicates that a particular node is not to be retrieved by the list or find commands, and its key value may be re-used. 
""" if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) designators = args[0].split(',') for designator in designators: try: classname, nodeid = hyperdb.splitDesignator(designator) except hyperdb.DesignatorError, message: - raise UsageError, message + raise UsageError(message) try: self.db.getclass(classname).retire(nodeid) except KeyError: - raise UsageError, _('no such class "%(classname)s"')%locals() + raise UsageError(_('no such class "%(classname)s"')%locals()) except IndexError: - raise UsageError, _('no such %(classname)s node "%(nodeid)s"')%locals() + raise UsageError(_('no such %(classname)s node ' + '"%(nodeid)s"')%locals()) self.db_uncommitted = True return 0 @@ -1044,22 +1065,26 @@ ''"""Usage: restore designator[,designator]* Restore the retired node specified by designator. + A designator is a classname and a nodeid concatenated, + eg. bug1, user10, ... + The given nodes will become available for users again. 
""" if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) designators = args[0].split(',') for designator in designators: try: classname, nodeid = hyperdb.splitDesignator(designator) except hyperdb.DesignatorError, message: - raise UsageError, message + raise UsageError(message) try: self.db.getclass(classname).restore(nodeid) except KeyError: - raise UsageError, _('no such class "%(classname)s"')%locals() + raise UsageError(_('no such class "%(classname)s"')%locals()) except IndexError: - raise UsageError, _('no such %(classname)s node "%(nodeid)s"')%locals() + raise UsageError(_('no such %(classname)s node ' + '"%(nodeid)s"')%locals()) self.db_uncommitted = True return 0 @@ -1078,19 +1103,19 @@ """ # grab the directory to export to if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) dir = args[-1] # get the list of classes to export if len(args) == 2: if args[0].startswith('-'): - classes = [ c for c in self.db.classes.keys() + classes = [ c for c in self.db.classes if not c in args[0][1:].split(',') ] else: classes = args[0].split(',') else: - classes = self.db.classes.keys() + classes = self.db.classes class colon_separated(csv.excel): delimiter = ':' @@ -1148,7 +1173,8 @@ sys.stdout.write("\nExporting Journal for %s\n" % classname) sys.stdout.flush() journals = csv.writer(jf, colon_separated) - map(journals.writerow, cl.export_journals()) + for row in cl.export_journals(): + journals.writerow(row) jf.close() if max_len > self.db.config.CSV_FIELD_SIZE: print >> sys.stderr, \ @@ -1191,7 +1217,7 @@ database (or, tediously, retire all the old data.) 
""" if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) from roundup import hyperdb if hasattr (csv, 'field_size_limit'): @@ -1232,7 +1258,10 @@ if hasattr(cl, 'import_files'): cl.import_files(dir, nodeid) maxid = max(maxid, int(nodeid)) + + # (print to sys.stdout here to allow tests to squash it .. ugh) print >> sys.stdout + f.close() # import the journals @@ -1241,8 +1270,10 @@ cl.import_journals(reader) f.close() - # set the id counter + # (print to sys.stdout here to allow tests to squash it .. ugh) print >> sys.stdout, 'setting', classname, maxid+1 + + # set the id counter self.db.setid(classname, str(maxid+1)) self.db_uncommitted = True @@ -1266,8 +1297,8 @@ 2001-01-01 """ - if len(args) <> 1: - raise UsageError, _('Not enough arguments supplied') + if len(args) != 1: + raise UsageError(_('Not enough arguments supplied')) # are we dealing with a period or a date value = args[0] @@ -1277,7 +1308,7 @@ """, re.VERBOSE) m = date_re.match(value) if not m: - raise ValueError, _('Invalid format') + raise ValueError(_('Invalid format')) m = m.groupdict() if m['period']: pack_before = date.Date(". 
- %s"%value) @@ -1302,8 +1333,8 @@ try: cl.index(m.group(2)) except IndexError: - raise UsageError, _('no such item "%(designator)s"')%{ - 'designator': arg} + raise UsageError(_('no such item "%(designator)s"')%{ + 'designator': arg}) else: cl = self.get_class(arg) self.db.reindex(arg) @@ -1323,7 +1354,7 @@ print _('No such Role "%(role)s"')%locals() return 1 else: - roles = self.db.security.role.items() + roles = list(self.db.security.role.items()) role = self.db.config.NEW_WEB_USER_ROLES if ',' in role: print _('New Web users get the Roles "%(role)s"')%locals() @@ -1499,7 +1530,7 @@ self.tracker_home = os.environ.get('TRACKER_HOME', '') # TODO: reinstate the user/password stuff (-u arg too) name = password = '' - if os.environ.has_key('ROUNDUP_LOGIN'): + if 'ROUNDUP_LOGIN' in os.environ: l = os.environ['ROUNDUP_LOGIN'].split(':') name = l[0] if len(l) > 1: Added: tracker/roundup-src/roundup/anypy/cookie_.py ============================================================================== --- (empty file) +++ tracker/roundup-src/roundup/anypy/cookie_.py Thu Aug 4 15:46:52 2011 @@ -0,0 +1,8 @@ + +try: + from http import cookies as Cookie + from http.cookies import CookieError, BaseCookie, SimpleCookie + from http.cookies import _getdate as get_cookie_date +except: + from Cookie import CookieError, BaseCookie, SimpleCookie + from Cookie import _getdate as get_cookie_date Added: tracker/roundup-src/roundup/anypy/dbm_.py ============================================================================== --- (empty file) +++ tracker/roundup-src/roundup/anypy/dbm_.py Thu Aug 4 15:46:52 2011 @@ -0,0 +1,20 @@ +# In Python 3 the "anydbm" module was renamed to be "dbm" which is now a +# package containing the various implementations. The "wichdb" module's +# whichdb() function was moved to the new "dbm" module. 
+ +import sys +if sys.version_info[:2] < (2, 6): + def key_in(db, key): + return db.has_key(key) +else: + def key_in(db, key): + return key in db + +try: + # old school first because <3 had a "dbm" module too... + import anydbm + from whichdb import whichdb +except ImportError: + # python 3+ + import dbm as anydbm + whichdb = anydbm.whichdb Added: tracker/roundup-src/roundup/anypy/email_.py ============================================================================== --- (empty file) +++ tracker/roundup-src/roundup/anypy/email_.py Thu Aug 4 15:46:52 2011 @@ -0,0 +1,19 @@ +try: + # Python 2.5+ + from email.parser import FeedParser +except ImportError: + # Python 2.4 + try : + from email.Parser import FeedParser + except ImportError: + from email.Parser import Parser + class FeedParser: + def __init__(self): + self.content = [] + + def feed(self, s): + self.content.append(s) + + def close(self): + p = Parser() + return p.parsestr(''.join(self.content)) Modified: tracker/roundup-src/roundup/anypy/hashlib_.py ============================================================================== --- tracker/roundup-src/roundup/anypy/hashlib_.py (original) +++ tracker/roundup-src/roundup/anypy/hashlib_.py Thu Aug 4 15:46:52 2011 @@ -4,8 +4,10 @@ try: from hashlib import md5, sha1 # new in Python 2.5 + shamodule = sha1 except ImportError: from md5 import md5 # deprecated in Python 2.6 from sha import sha as sha1 # deprecated in Python 2.6 + import sha as shamodule # vim: ts=8 sts=4 sw=4 si Added: tracker/roundup-src/roundup/anypy/http_.py ============================================================================== --- (empty file) +++ tracker/roundup-src/roundup/anypy/http_.py Thu Aug 4 15:46:52 2011 @@ -0,0 +1,5 @@ +try: + from http import client +except: + import httplib as client + Added: tracker/roundup-src/roundup/anypy/io_.py ============================================================================== --- (empty file) +++ tracker/roundup-src/roundup/anypy/io_.py Thu 
Aug 4 15:46:52 2011 @@ -0,0 +1,7 @@ + +try: + from io import StringIO, BytesIO +except: + from StringIO import StringIO + BytesIO = StringIO + Modified: tracker/roundup-src/roundup/anypy/sets_.py ============================================================================== --- tracker/roundup-src/roundup/anypy/sets_.py (original) +++ tracker/roundup-src/roundup/anypy/sets_.py Thu Aug 4 15:46:52 2011 @@ -24,7 +24,7 @@ try: set = set # built-in since Python 2.4 -except NameError, TypeError: +except (NameError, TypeError): from sets import Set as set # deprecated as of Python 2.6 # vim: ts=8 sts=4 sw=4 si et Added: tracker/roundup-src/roundup/anypy/urllib_.py ============================================================================== --- (empty file) +++ tracker/roundup-src/roundup/anypy/urllib_.py Thu Aug 4 15:46:52 2011 @@ -0,0 +1,6 @@ + +try: + from urllib.parse import quote, urlparse +except: + from urllib import quote + from urlparse import urlparse Modified: tracker/roundup-src/roundup/backends/__init__.py ============================================================================== --- tracker/roundup-src/roundup/backends/__init__.py (original) +++ tracker/roundup-src/roundup/backends/__init__.py Thu Aug 4 15:46:52 2011 @@ -38,7 +38,7 @@ '''Get a specific backend by name.''' vars = globals() # if requested backend has been imported yet, return current instance - if vars.has_key(name): + if name in vars: return vars[name] # import the backend module module_name = 'back_%s' % name Modified: tracker/roundup-src/roundup/backends/back_anydbm.py ============================================================================== --- tracker/roundup-src/roundup/backends/back_anydbm.py (original) +++ tracker/roundup-src/roundup/backends/back_anydbm.py Thu Aug 4 15:46:52 2011 @@ -22,30 +22,22 @@ """ __docformat__ = 'restructuredtext' -try: - import anydbm, sys - # dumbdbm only works in python 2.1.2+ - if sys.version_info < (2,1,2): - import dumbdbm - assert 
anydbm._defaultmod != dumbdbm - del dumbdbm -except AssertionError: - print "WARNING: you should upgrade to python 2.1.3" +import os, marshal, re, weakref, string, copy, time, shutil, logging -import whichdb, os, marshal, re, weakref, string, copy, time, shutil, logging +from roundup.anypy.dbm_ import anydbm, whichdb, key_in from roundup import hyperdb, date, password, roundupdb, security, support from roundup.support import reversed from roundup.backends import locking from roundup.i18n import _ -from blobfiles import FileStorage -from sessions_dbm import Sessions, OneTimeKeys +from roundup.backends.blobfiles import FileStorage +from roundup.backends.sessions_dbm import Sessions, OneTimeKeys try: - from indexer_xapian import Indexer + from roundup.backends.indexer_xapian import Indexer except ImportError: - from indexer_dbm import Indexer + from roundup.backends.indexer_dbm import Indexer def db_exists(config): # check for the user db @@ -57,6 +49,87 @@ def db_nuke(config): shutil.rmtree(config.DATABASE) +class Binary: + + def __init__(self, x, y): + self.x = x + self.y = y + + def visit(self, visitor): + self.x.visit(visitor) + self.y.visit(visitor) + +class Unary: + + def __init__(self, x): + self.x = x + + def generate(self, atom): + return atom(self) + + def visit(self, visitor): + self.x.visit(visitor) + +class Equals(Unary): + + def evaluate(self, v): + return self.x in v + + def visit(self, visitor): + visitor(self) + +class Not(Unary): + + def evaluate(self, v): + return not self.x.evaluate(v) + + def generate(self, atom): + return "NOT(%s)" % self.x.generate(atom) + +class Or(Binary): + + def evaluate(self, v): + return self.x.evaluate(v) or self.y.evaluate(v) + + def generate(self, atom): + return "(%s)OR(%s)" % ( + self.x.generate(atom), + self.y.generate(atom)) + +class And(Binary): + + def evaluate(self, v): + return self.x.evaluate(v) and self.y.evaluate(v) + + def generate(self, atom): + return "(%s)AND(%s)" % ( + self.x.generate(atom), + 
self.y.generate(atom)) + +def compile_expression(opcodes): + + stack = [] + push, pop = stack.append, stack.pop + for opcode in opcodes: + if opcode == -2: push(Not(pop())) + elif opcode == -3: push(And(pop(), pop())) + elif opcode == -4: push(Or(pop(), pop())) + else: push(Equals(opcode)) + + return pop() + +class Expression: + + def __init__(self, v): + try: + opcodes = [int(x) for x in v] + if min(opcodes) >= -1: raise ValueError() + + compiled = compile_expression(opcodes) + self.evaluate = lambda x: compiled.evaluate([int(y) for y in x]) + except: + self.evaluate = lambda x: bool(set(x) & set(v)) + # # Now the database # @@ -146,13 +219,13 @@ # def __getattr__(self, classname): """A convenient way of calling self.getclass(classname).""" - if self.classes.has_key(classname): + if classname in self.classes: return self.classes[classname] raise AttributeError, classname def addclass(self, cl): cn = cl.classname - if self.classes.has_key(cn): + if cn in self.classes: raise ValueError, cn self.classes[cn] = cl @@ -163,6 +236,8 @@ description="User is allowed to edit "+cn) self.security.addPermission(name="View", klass=cn, description="User is allowed to access "+cn) + self.security.addPermission(name="Retire", klass=cn, + description="User is allowed to retire "+cn) def getclasses(self): """Return a list of the names of all existing classes.""" @@ -178,7 +253,7 @@ try: return self.classes[classname] except KeyError: - raise KeyError, 'There is no class called "%s"'%classname + raise KeyError('There is no class called "%s"'%classname) # # Class DBs @@ -186,8 +261,8 @@ def clear(self): """Delete all database contents """ - logging.getLogger('hyperdb').info('clear') - for cn in self.classes.keys(): + logging.getLogger('roundup.hyperdb').info('clear') + for cn in self.classes: for dummy in 'nodes', 'journals': path = os.path.join(self.dir, 'journals.%s'%cn) if os.path.exists(path): @@ -212,10 +287,9 @@ """ db_type = '' if os.path.exists(path): - db_type = 
whichdb.whichdb(path) + db_type = whichdb(path) if not db_type: - raise hyperdb.DatabaseError, \ - _("Couldn't identify database type") + raise hyperdb.DatabaseError(_("Couldn't identify database type")) elif os.path.exists(path+'.db'): # if the path ends in '.db', it's a dbm database, whether # anydbm says it's dbhash or not! @@ -231,21 +305,24 @@ db_type = self.determine_db_type(path) # new database? let anydbm pick the best dbm - if not db_type: + # in Python 3+ the "dbm" ("anydbm" to us) module already uses the + # whichdb() function to do this + if not db_type or hasattr(anydbm, 'whichdb'): if __debug__: - logging.getLogger('hyperdb').debug("opendb anydbm.open(%r, 'c')"%path) + logging.getLogger('roundup.hyperdb').debug( + "opendb anydbm.open(%r, 'c')"%path) return anydbm.open(path, 'c') - # open the database with the correct module + # in Python <3 it anydbm was a little dumb so manually open the + # database with the correct module try: dbm = __import__(db_type) except ImportError: - raise hyperdb.DatabaseError, \ - _("Couldn't open database - the required module '%s'"\ - " is not available")%db_type + raise hyperdb.DatabaseError(_("Couldn't open database - the " + "required module '%s' is not available")%db_type) if __debug__: - logging.getLogger('hyperdb').debug("opendb %r.open(%r, %r)"%(db_type, path, - mode)) + logging.getLogger('roundup.hyperdb').debug( + "opendb %r.open(%r, %r)"%(db_type, path, mode)) return dbm.open(path, mode) # @@ -256,7 +333,7 @@ """ # open the ids DB - create if if doesn't exist db = self.opendb('_ids', 'c') - if db.has_key(classname): + if key_in(db, classname): newid = db[classname] = str(int(db[classname]) + 1) else: # the count() bit is transitional - older dbs won't start at 1 @@ -280,7 +357,7 @@ """ add the specified node to its class's db """ # we'll be supplied these props if we're doing an import - if not node.has_key('creator'): + if 'creator' not in node: # add in the "calculated" properties (dupe so we don't affect # 
calling code's node assumptions) node = node.copy() @@ -305,7 +382,8 @@ """ perform the saving of data specified by the set/addnode """ if __debug__: - logging.getLogger('hyperdb').debug('save %s%s %r'%(classname, nodeid, node)) + logging.getLogger('roundup.hyperdb').debug( + 'save %s%s %r'%(classname, nodeid, node)) self.transactions.append((self.doSaveNode, (classname, nodeid, node))) def getnode(self, classname, nodeid, db=None, cache=1): @@ -316,27 +394,29 @@ """ # try the cache cache_dict = self.cache.setdefault(classname, {}) - if cache_dict.has_key(nodeid): + if nodeid in cache_dict: if __debug__: - logging.getLogger('hyperdb').debug('get %s%s cached'%(classname, nodeid)) + logging.getLogger('roundup.hyperdb').debug( + 'get %s%s cached'%(classname, nodeid)) self.stats['cache_hits'] += 1 return cache_dict[nodeid] if __debug__: self.stats['cache_misses'] += 1 start_t = time.time() - logging.getLogger('hyperdb').debug('get %s%s'%(classname, nodeid)) + logging.getLogger('roundup.hyperdb').debug( + 'get %s%s'%(classname, nodeid)) # get from the database and save in the cache if db is None: db = self.getclassdb(classname) - if not db.has_key(nodeid): - raise IndexError, "no such %s %s"%(classname, nodeid) + if not key_in(db, nodeid): + raise IndexError("no such %s %s"%(classname, nodeid)) # check the uncommitted, destroyed nodes - if (self.destroyednodes.has_key(classname) and - self.destroyednodes[classname].has_key(nodeid)): - raise IndexError, "no such %s %s"%(classname, nodeid) + if (classname in self.destroyednodes and + nodeid in self.destroyednodes[classname]): + raise IndexError("no such %s %s"%(classname, nodeid)) # decode res = marshal.loads(db[nodeid]) @@ -357,14 +437,13 @@ """Remove a node from the database. Called exclusively by the destroy() method on Class. 
""" - logging.getLogger('hyperdb').info('destroy %s%s'%(classname, nodeid)) + logging.getLogger('roundup.hyperdb').info( + 'destroy %s%s'%(classname, nodeid)) # remove from cache and newnodes if it's there - if (self.cache.has_key(classname) and - self.cache[classname].has_key(nodeid)): + if (classname in self.cache and nodeid in self.cache[classname]): del self.cache[classname][nodeid] - if (self.newnodes.has_key(classname) and - self.newnodes[classname].has_key(nodeid)): + if (classname in self.newnodes and nodeid in self.newnodes[classname]): del self.newnodes[classname][nodeid] # see if there's any obvious commit actions that we should get rid of @@ -385,13 +464,13 @@ """ properties = self.getclass(classname).getprops() d = {} - for k, v in node.items(): + for k, v in node.iteritems(): if k == self.RETIRED_FLAG: d[k] = v continue # if the property doesn't exist then we really don't care - if not properties.has_key(k): + if k not in properties: continue # get the property spec @@ -412,10 +491,10 @@ """ properties = self.getclass(classname).getprops() d = {} - for k, v in node.items(): + for k, v in node.iteritems(): # if the property doesn't exist, or is the "retired" flag then # it won't be in the properties dict - if not properties.has_key(k): + if k not in properties: d[k] = v continue @@ -427,9 +506,7 @@ elif isinstance(prop, hyperdb.Interval) and v is not None: d[k] = date.Interval(v) elif isinstance(prop, hyperdb.Password) and v is not None: - p = password.Password() - p.unpack(v) - d[k] = p + d[k] = password.Password(encrypted=v) else: d[k] = v return d @@ -439,29 +516,27 @@ """ # try the cache cache = self.cache.setdefault(classname, {}) - if cache.has_key(nodeid): + if nodeid in cache: return 1 # not in the cache - check the database if db is None: db = self.getclassdb(classname) - res = db.has_key(nodeid) - return res + return key_in(db, nodeid) def countnodes(self, classname, db=None): count = 0 # include the uncommitted nodes - if 
self.newnodes.has_key(classname): + if classname in self.newnodes: count += len(self.newnodes[classname]) - if self.destroyednodes.has_key(classname): + if classname in self.destroyednodes: count -= len(self.destroyednodes[classname]) # and count those in the DB if db is None: db = self.getclassdb(classname) - count = count + len(db.keys()) - return count + return count + len(db) # @@ -484,7 +559,8 @@ the current user. """ if __debug__: - logging.getLogger('hyperdb').debug('addjournal %s%s %s %r %s %r'%(classname, + logging.getLogger('roundup.hyperdb').debug( + 'addjournal %s%s %s %r %s %r'%(classname, nodeid, action, params, creator, creation)) if creator is None: creator = self.getuid() @@ -494,8 +570,8 @@ def setjournal(self, classname, nodeid, journal): """Set the journal to the "journal" list.""" if __debug__: - logging.getLogger('hyperdb').debug('setjournal %s%s %r'%(classname, - nodeid, journal)) + logging.getLogger('roundup.hyperdb').debug( + 'setjournal %s%s %r'%(classname, nodeid, journal)) self.transactions.append((self.doSetJournal, (classname, nodeid, journal))) @@ -529,14 +605,14 @@ db = self.opendb('journals.%s'%classname, 'r') except anydbm.error, error: if str(error) == "need 'c' or 'n' flag to open new db": - raise IndexError, 'no such %s %s'%(classname, nodeid) + raise IndexError('no such %s %s'%(classname, nodeid)) elif error.args[0] != 2: # this isn't a "not found" error, be alarmed! raise if res: # we have unsaved journal entries, return them return res - raise IndexError, 'no such %s %s'%(classname, nodeid) + raise IndexError('no such %s %s'%(classname, nodeid)) try: journal = marshal.loads(db[nodeid]) except KeyError: @@ -544,7 +620,7 @@ if res: # we have some unsaved journal entries, be happy! 
return res - raise IndexError, 'no such %s %s'%(classname, nodeid) + raise IndexError('no such %s %s'%(classname, nodeid)) db.close() # add all the saved journal entries for this node @@ -581,8 +657,8 @@ packed += 1 db[key] = marshal.dumps(l) - logging.getLogger('hyperdb').info('packed %d %s items'%(packed, - classname)) + logging.getLogger('roundup.hyperdb').info( + 'packed %d %s items'%(packed, classname)) if db_type == 'gdbm': db.reorganize() @@ -604,7 +680,7 @@ The only backend this seems to affect is postgres. """ - logging.getLogger('hyperdb').info('commit %s transactions'%( + logging.getLogger('roundup.hyperdb').info('commit %s transactions'%( len(self.transactions))) # keep a handle to all the database files opened @@ -617,7 +693,7 @@ reindex[method(*args)] = 1 finally: # make sure we close all the database files - for db in self.databases.values(): + for db in self.databases.itervalues(): db.close() del self.databases @@ -627,7 +703,7 @@ self.transactions = [] # reindex the nodes that request it - for classname, nodeid in filter(None, reindex.keys()): + for classname, nodeid in [k for k in reindex if k]: self.getclass(classname).index(nodeid) # save the indexer state @@ -648,7 +724,7 @@ """ # get the database handle db_name = 'nodes.%s'%classname - if not self.databases.has_key(db_name): + if db_name not in self.databases: self.databases[db_name] = self.getclassdb(classname, 'c') return self.databases[db_name] @@ -666,7 +742,7 @@ """ # get the database handle db_name = 'journals.%s'%classname - if not self.databases.has_key(db_name): + if db_name not in self.databases: self.databases[db_name] = self.opendb(db_name, 'c') return self.databases[db_name] @@ -691,7 +767,7 @@ db = self.getCachedJournalDB(classname) # now insert the journal entry - if db.has_key(nodeid): + if key_in(db, nodeid): # append to existing s = db[nodeid] l = marshal.loads(s) @@ -716,18 +792,18 @@ def doDestroyNode(self, classname, nodeid): # delete from the class database db = 
self.getCachedClassDB(classname) - if db.has_key(nodeid): + if key_in(db, nodeid): del db[nodeid] # delete from the database db = self.getCachedJournalDB(classname) - if db.has_key(nodeid): + if key_in(db, nodeid): del db[nodeid] def rollback(self): """ Reverse all actions from the current transaction. """ - logging.getLogger('hyperdb').info('rollback %s transactions'%( + logging.getLogger('roundup.hyperdb').info('rollback %s transactions'%( len(self.transactions))) for method, args in self.transactions: @@ -784,6 +860,8 @@ These operations trigger detectors and can be vetoed. Attempts to modify the "creation" or "activity" properties cause a KeyError. """ + if self.db.journaltag is None: + raise hyperdb.DatabaseError(_('Database open read-only')) self.fireAuditors('create', None, propvalues) newid = self.create_inner(**propvalues) self.fireReactors('create', newid, None) @@ -792,48 +870,49 @@ def create_inner(self, **propvalues): """ Called by create, in-between the audit and react calls. 
""" - if propvalues.has_key('id'): - raise KeyError, '"id" is reserved' + if 'id' in propvalues: + raise KeyError('"id" is reserved') if self.db.journaltag is None: - raise hyperdb.DatabaseError, _('Database open read-only') + raise hyperdb.DatabaseError(_('Database open read-only')) - if propvalues.has_key('creation') or propvalues.has_key('activity'): - raise KeyError, '"creation" and "activity" are reserved' + if 'creation' in propvalues or 'activity' in propvalues: + raise KeyError('"creation" and "activity" are reserved') # new node's id newid = self.db.newid(self.classname) # validate propvalues num_re = re.compile('^\d+$') - for key, value in propvalues.items(): + for key, value in propvalues.iteritems(): if key == self.key: try: self.lookup(value) except KeyError: pass else: - raise ValueError, 'node with key "%s" exists'%value + raise ValueError('node with key "%s" exists'%value) # try to handle this property try: prop = self.properties[key] except KeyError: - raise KeyError, '"%s" has no property "%s"'%(self.classname, - key) + raise KeyError('"%s" has no property "%s"'%(self.classname, + key)) if value is not None and isinstance(prop, hyperdb.Link): if type(value) != type(''): - raise ValueError, 'link value must be String' + raise ValueError('link value must be String') link_class = self.properties[key].classname # if it isn't a number, it's a key if not num_re.match(value): try: value = self.db.classes[link_class].lookup(value) except (TypeError, KeyError): - raise IndexError, 'new property "%s": %s not a %s'%( - key, value, link_class) + raise IndexError('new property "%s": %s not a %s'%( + key, value, link_class)) elif not self.db.getclass(link_class).hasnode(value): - raise IndexError, '%s has no node %s'%(link_class, value) + raise IndexError('%s has no node %s'%(link_class, + value)) # save off the value propvalues[key] = value @@ -847,22 +926,22 @@ if value is None: value = [] if not hasattr(value, '__iter__'): - raise TypeError, 'new property 
"%s" not an iterable of ids'%key + raise TypeError('new property "%s" not an iterable of ids'%key) # clean up and validate the list of links link_class = self.properties[key].classname l = [] for entry in value: if type(entry) != type(''): - raise ValueError, '"%s" multilink value (%r) '\ - 'must contain Strings'%(key, value) + raise ValueError('"%s" multilink value (%r) '\ + 'must contain Strings'%(key, value)) # if it isn't a number, it's a key if not num_re.match(entry): try: entry = self.db.classes[link_class].lookup(entry) except (TypeError, KeyError): - raise IndexError, 'new property "%s": %s not a %s'%( - key, entry, self.properties[key].classname) + raise IndexError('new property "%s": %s not a %s'%( + key, entry, self.properties[key].classname)) l.append(entry) value = l propvalues[key] = value @@ -870,8 +949,8 @@ # handle additions for nodeid in value: if not self.db.getclass(link_class).hasnode(nodeid): - raise IndexError, '%s has no node %s'%(link_class, - nodeid) + raise IndexError('%s has no node %s'%(link_class, + nodeid)) # register the link with the newly linked node if self.do_journal and self.properties[key].do_journal: self.db.addjournal(link_class, nodeid, 'link', @@ -879,41 +958,41 @@ elif isinstance(prop, hyperdb.String): if type(value) != type('') and type(value) != type(u''): - raise TypeError, 'new property "%s" not a string'%key + raise TypeError('new property "%s" not a string'%key) if prop.indexme: self.db.indexer.add_text((self.classname, newid, key), value) elif isinstance(prop, hyperdb.Password): if not isinstance(value, password.Password): - raise TypeError, 'new property "%s" not a Password'%key + raise TypeError('new property "%s" not a Password'%key) elif isinstance(prop, hyperdb.Date): if value is not None and not isinstance(value, date.Date): - raise TypeError, 'new property "%s" not a Date'%key + raise TypeError('new property "%s" not a Date'%key) elif isinstance(prop, hyperdb.Interval): if value is not None and not 
isinstance(value, date.Interval): - raise TypeError, 'new property "%s" not an Interval'%key + raise TypeError('new property "%s" not an Interval'%key) elif value is not None and isinstance(prop, hyperdb.Number): try: float(value) except ValueError: - raise TypeError, 'new property "%s" not numeric'%key + raise TypeError('new property "%s" not numeric'%key) elif value is not None and isinstance(prop, hyperdb.Boolean): try: int(value) except ValueError: - raise TypeError, 'new property "%s" not boolean'%key + raise TypeError('new property "%s" not boolean'%key) # make sure there's data where there needs to be - for key, prop in self.properties.items(): - if propvalues.has_key(key): + for key, prop in self.properties.iteritems(): + if key in propvalues: continue if key == self.key: - raise ValueError, 'key property "%s" is required'%key + raise ValueError('key property "%s" is required'%key) if isinstance(prop, hyperdb.Multilink): propvalues[key] = [] @@ -944,21 +1023,21 @@ # check for one of the special props if propname == 'creation': - if d.has_key('creation'): + if 'creation' in d: return d['creation'] if not self.do_journal: - raise ValueError, 'Journalling is disabled for this class' + raise ValueError('Journalling is disabled for this class') journal = self.db.getjournal(self.classname, nodeid) if journal: - return self.db.getjournal(self.classname, nodeid)[0][1] + return journal[0][1] else: # on the strange chance that there's no journal return date.Date() if propname == 'activity': - if d.has_key('activity'): + if 'activity' in d: return d['activity'] if not self.do_journal: - raise ValueError, 'Journalling is disabled for this class' + raise ValueError('Journalling is disabled for this class') journal = self.db.getjournal(self.classname, nodeid) if journal: return self.db.getjournal(self.classname, nodeid)[-1][1] @@ -966,10 +1045,10 @@ # on the strange chance that there's no journal return date.Date() if propname == 'creator': - if d.has_key('creator'): + 
if 'creator' in d: return d['creator'] if not self.do_journal: - raise ValueError, 'Journalling is disabled for this class' + raise ValueError('Journalling is disabled for this class') journal = self.db.getjournal(self.classname, nodeid) if journal: num_re = re.compile('^\d+$') @@ -986,10 +1065,10 @@ else: return self.db.getuid() if propname == 'actor': - if d.has_key('actor'): + if 'actor' in d: return d['actor'] if not self.do_journal: - raise ValueError, 'Journalling is disabled for this class' + raise ValueError('Journalling is disabled for this class') journal = self.db.getjournal(self.classname, nodeid) if journal: num_re = re.compile('^\d+$') @@ -1009,7 +1088,7 @@ # get the property (raises KeyErorr if invalid) prop = self.properties[propname] - if not d.has_key(propname): + if propname not in d: if default is _marker: if isinstance(prop, hyperdb.Multilink): return [] @@ -1045,10 +1124,13 @@ These operations trigger detectors and can be vetoed. Attempts to modify the "creation" or "activity" properties cause a KeyError. 
""" + if self.db.journaltag is None: + raise hyperdb.DatabaseError(_('Database open read-only')) + self.fireAuditors('set', nodeid, propvalues) oldvalues = copy.deepcopy(self.db.getnode(self.classname, nodeid)) - for name,prop in self.getprops(protected=0).items(): - if oldvalues.has_key(name): + for name, prop in self.getprops(protected=0).iteritems(): + if name in oldvalues: continue if isinstance(prop, hyperdb.Multilink): oldvalues[name] = [] @@ -1064,24 +1146,25 @@ if not propvalues: return propvalues - if propvalues.has_key('creation') or propvalues.has_key('activity'): + if 'creation' in propvalues or 'activity' in propvalues: raise KeyError, '"creation" and "activity" are reserved' - if propvalues.has_key('id'): + if 'id' in propvalues: raise KeyError, '"id" is reserved' if self.db.journaltag is None: - raise hyperdb.DatabaseError, _('Database open read-only') + raise hyperdb.DatabaseError(_('Database open read-only')) node = self.db.getnode(self.classname, nodeid) - if node.has_key(self.db.RETIRED_FLAG): + if self.db.RETIRED_FLAG in node: raise IndexError num_re = re.compile('^\d+$') # if the journal value is to be different, store it in here journalvalues = {} - for propname, value in propvalues.items(): + # list() propvalues 'cos it might be modified by the loop + for propname, value in list(propvalues.items()): # check to make sure we're not duplicating an existing key if propname == self.key and node[propname] != value: try: @@ -1089,7 +1172,7 @@ except KeyError: pass else: - raise ValueError, 'node with key "%s" exists'%value + raise ValueError('node with key "%s" exists'%value) # this will raise the KeyError if the property isn't valid # ... 
we don't use getprops() here because we only care about @@ -1097,8 +1180,8 @@ try: prop = self.properties[propname] except KeyError: - raise KeyError, '"%s" has no property named "%s"'%( - self.classname, propname) + raise KeyError('"%s" has no property named "%s"'%( + self.classname, propname)) # if the value's the same as the existing value, no sense in # doing anything @@ -1113,22 +1196,23 @@ link_class = prop.classname # if it isn't a number, it's a key if value is not None and not isinstance(value, type('')): - raise ValueError, 'property "%s" link value be a string'%( - propname) + raise ValueError('property "%s" link value be a string'%( + propname)) if isinstance(value, type('')) and not num_re.match(value): try: value = self.db.classes[link_class].lookup(value) except (TypeError, KeyError): - raise IndexError, 'new property "%s": %s not a %s'%( - propname, value, prop.classname) + raise IndexError('new property "%s": %s not a %s'%( + propname, value, prop.classname)) if (value is not None and not self.db.getclass(link_class).hasnode(value)): - raise IndexError, '%s has no node %s'%(link_class, value) + raise IndexError('%s has no node %s'%(link_class, + value)) if self.do_journal and prop.do_journal: # register the unlink with the old linked node - if node.has_key(propname) and node[propname] is not None: + if propname in node and node[propname] is not None: self.db.addjournal(link_class, node[propname], 'unlink', (self.classname, nodeid, propname)) @@ -1141,22 +1225,22 @@ if value is None: value = [] if not hasattr(value, '__iter__'): - raise TypeError, 'new property "%s" not an iterable of'\ - ' ids'%propname + raise TypeError('new property "%s" not an iterable of' + ' ids'%propname) link_class = self.properties[propname].classname l = [] for entry in value: # if it isn't a number, it's a key if type(entry) != type(''): - raise ValueError, 'new property "%s" link value ' \ - 'must be a string'%propname + raise ValueError('new property "%s" link value ' + 
'must be a string'%propname) if not num_re.match(entry): try: entry = self.db.classes[link_class].lookup(entry) except (TypeError, KeyError): - raise IndexError, 'new property "%s": %s not a %s'%( + raise IndexError('new property "%s": %s not a %s'%( propname, entry, - self.properties[propname].classname) + self.properties[propname].classname)) l.append(entry) value = l propvalues[propname] = value @@ -1166,7 +1250,7 @@ remove = [] # handle removals - if node.has_key(propname): + if propname in node: l = node[propname] else: l = [] @@ -1183,7 +1267,8 @@ # handle additions for id in value: if not self.db.getclass(link_class).hasnode(id): - raise IndexError, '%s has no node %s'%(link_class, id) + raise IndexError('%s has no node %s'%(link_class, + id)) if id in l: continue # register the link with the newly linked node @@ -1204,38 +1289,45 @@ elif isinstance(prop, hyperdb.String): if value is not None and type(value) != type('') and type(value) != type(u''): - raise TypeError, 'new property "%s" not a string'%propname + raise TypeError('new property "%s" not a ' + 'string'%propname) if prop.indexme: self.db.indexer.add_text((self.classname, nodeid, propname), value) elif isinstance(prop, hyperdb.Password): if not isinstance(value, password.Password): - raise TypeError, 'new property "%s" not a Password'%propname + raise TypeError('new property "%s" not a ' + 'Password'%propname) propvalues[propname] = value + journalvalues[propname] = \ + current and password.JournalPassword(current) elif value is not None and isinstance(prop, hyperdb.Date): if not isinstance(value, date.Date): - raise TypeError, 'new property "%s" not a Date'% propname + raise TypeError('new property "%s" not a ' + 'Date'%propname) propvalues[propname] = value elif value is not None and isinstance(prop, hyperdb.Interval): if not isinstance(value, date.Interval): - raise TypeError, 'new property "%s" not an '\ - 'Interval'%propname + raise TypeError('new property "%s" not an ' + 'Interval'%propname) 
propvalues[propname] = value elif value is not None and isinstance(prop, hyperdb.Number): try: float(value) except ValueError: - raise TypeError, 'new property "%s" not numeric'%propname + raise TypeError('new property "%s" not ' + 'numeric'%propname) elif value is not None and isinstance(prop, hyperdb.Boolean): try: int(value) except ValueError: - raise TypeError, 'new property "%s" not boolean'%propname + raise TypeError('new property "%s" not ' + 'boolean'%propname) node[propname] = value @@ -1268,7 +1360,7 @@ to modify the "creation" or "activity" properties cause a KeyError. """ if self.db.journaltag is None: - raise hyperdb.DatabaseError, _('Database open read-only') + raise hyperdb.DatabaseError(_('Database open read-only')) self.fireAuditors('retire', nodeid, None) @@ -1286,7 +1378,7 @@ Make node available for all operations like it was before retirement. """ if self.db.journaltag is None: - raise hyperdb.DatabaseError, _('Database open read-only') + raise hyperdb.DatabaseError(_('Database open read-only')) node = self.db.getnode(self.classname, nodeid) # check if key property was overrided @@ -1296,8 +1388,8 @@ except KeyError: pass else: - raise KeyError, "Key property (%s) of retired node clashes with \ - existing one (%s)" % (key, node[key]) + raise KeyError("Key property (%s) of retired node clashes " + "with existing one (%s)" % (key, node[key])) # Now we can safely restore node self.fireAuditors('restore', nodeid, None) del node[self.db.RETIRED_FLAG] @@ -1311,7 +1403,7 @@ """Return true if the node is retired. """ node = self.db.getnode(self.classname, nodeid, cldb) - if node.has_key(self.db.RETIRED_FLAG): + if self.db.RETIRED_FLAG in node: return 1 return 0 @@ -1332,26 +1424,9 @@ support the session storage of the cgi interface. 
""" if self.db.journaltag is None: - raise hyperdb.DatabaseError, _('Database open read-only') + raise hyperdb.DatabaseError(_('Database open read-only')) self.db.destroynode(self.classname, nodeid) - def history(self, nodeid): - """Retrieve the journal of edits on a particular node. - - 'nodeid' must be the id of an existing node of this class or an - IndexError is raised. - - The returned list contains tuples of the form - - (nodeid, date, tag, action, params) - - 'date' is a Timestamp object specifying the time of the change and - 'tag' is the journaltag specified when the database was opened. - """ - if not self.do_journal: - raise ValueError, 'Journalling is disabled for this class' - return self.db.getjournal(self.classname, nodeid) - # Locating nodes: def hasnode(self, nodeid): """Determine if the given nodeid actually exists @@ -1368,7 +1443,7 @@ """ prop = self.getprops()[propname] if not isinstance(prop, hyperdb.String): - raise TypeError, 'key properties must be String' + raise TypeError('key properties must be String') self.key = propname def getkey(self): @@ -1385,21 +1460,22 @@ otherwise a KeyError is raised. 
""" if not self.key: - raise TypeError, 'No key property set for class %s'%self.classname + raise TypeError('No key property set for ' + 'class %s'%self.classname) cldb = self.db.getclassdb(self.classname) try: for nodeid in self.getnodeids(cldb): node = self.db.getnode(self.classname, nodeid, cldb) - if node.has_key(self.db.RETIRED_FLAG): + if self.db.RETIRED_FLAG in node: continue - if not node.has_key(self.key): + if self.key not in node: continue if node[self.key] == keyvalue: return nodeid finally: cldb.close() - raise KeyError, 'No key (%s) value "%s" for "%s"'%(self.key, - keyvalue, self.classname) + raise KeyError('No key (%s) value "%s" for "%s"'%(self.key, + keyvalue, self.classname)) # change from spec - allows multiple props to match def find(self, **propspec): @@ -1417,12 +1493,12 @@ db.issue.find(messages='1') db.issue.find(messages={'1':1,'3':1}, files={'7':1}) """ - propspec = propspec.items() - for propname, itemids in propspec: + for propname, itemids in propspec.iteritems(): # check the prop is OK prop = self.properties[propname] if not isinstance(prop, hyperdb.Link) and not isinstance(prop, hyperdb.Multilink): - raise TypeError, "'%s' not a Link/Multilink property"%propname + raise TypeError("'%s' not a Link/Multilink " + "property"%propname) # ok, now do the find cldb = self.db.getclassdb(self.classname) @@ -1430,15 +1506,15 @@ try: for id in self.getnodeids(db=cldb): item = self.db.getnode(self.classname, id, db=cldb) - if item.has_key(self.db.RETIRED_FLAG): + if self.db.RETIRED_FLAG in item: continue - for propname, itemids in propspec: + for propname, itemids in propspec.iteritems(): if type(itemids) is not type({}): itemids = {itemids:1} # special case if the item doesn't have this property - if not item.has_key(propname): - if itemids.has_key(None): + if propname not in item: + if None in itemids: l.append(id) break continue @@ -1446,13 +1522,13 @@ # grab the property definition and its value on this item prop = self.properties[propname] 
value = item[propname] - if isinstance(prop, hyperdb.Link) and itemids.has_key(value): + if isinstance(prop, hyperdb.Link) and value in itemids: l.append(id) break elif isinstance(prop, hyperdb.Multilink): hit = 0 for v in value: - if itemids.has_key(v): + if v in itemids: l.append(id) hit = 1 break @@ -1470,20 +1546,20 @@ The return is a list of the id of all nodes that match. """ - for propname in requirements.keys(): + for propname in requirements: prop = self.properties[propname] if not isinstance(prop, hyperdb.String): - raise TypeError, "'%s' not a String property"%propname + raise TypeError("'%s' not a String property"%propname) requirements[propname] = requirements[propname].lower() l = [] cldb = self.db.getclassdb(self.classname) try: for nodeid in self.getnodeids(cldb): node = self.db.getnode(self.classname, nodeid, cldb) - if node.has_key(self.db.RETIRED_FLAG): + if self.db.RETIRED_FLAG in node: continue - for key, value in requirements.items(): - if not node.has_key(key): + for key, value in requirements.iteritems(): + if key not in node: break if node[key] is None or node[key].lower() != value: break @@ -1502,7 +1578,7 @@ try: for nodeid in self.getnodeids(cldb): node = self.db.getnode(cn, nodeid, cldb) - if node.has_key(self.db.RETIRED_FLAG): + if self.db.RETIRED_FLAG in node: continue l.append(nodeid) finally: @@ -1519,20 +1595,20 @@ res = [] # start off with the new nodes - if self.db.newnodes.has_key(self.classname): - res += self.db.newnodes[self.classname].keys() + if self.classname in self.db.newnodes: + res.extend(self.db.newnodes[self.classname]) must_close = False if db is None: db = self.db.getclassdb(self.classname) must_close = True try: - res = res + db.keys() + res.extend(db.keys()) # remove the uncommitted, destroyed nodes - if self.db.destroyednodes.has_key(self.classname): - for nodeid in self.db.destroyednodes[self.classname].keys(): - if db.has_key(nodeid): + if self.classname in self.db.destroyednodes: + for nodeid in 
self.db.destroyednodes[self.classname]: + if key_in(db, nodeid): res.remove(nodeid) # check retired flag @@ -1540,7 +1616,7 @@ l = [] for nodeid in res: node = self.db.getnode(self.classname, nodeid, db) - is_ret = node.has_key(self.db.RETIRED_FLAG) + is_ret = self.db.RETIRED_FLAG in node if retired == is_ret: l.append(nodeid) res = l @@ -1583,7 +1659,7 @@ INTERVAL = 'spec:interval' OTHER = 'spec:other' - for k, v in filterspec.items(): + for k, v in filterspec.iteritems(): propclass = props[k] if isinstance(propclass, hyperdb.Link): if type(v) is not type([]): @@ -1627,12 +1703,14 @@ pass elif isinstance(propclass, hyperdb.Boolean): - if type(v) != type([]): + if type(v) == type(""): v = v.split(',') + if type(v) != type([]): + v = [v] bv = [] for val in v: if type(val) is type(''): - bv.append(val.lower() in ('yes', 'true', 'on', '1')) + bv.append(propclass.from_raw (val)) else: bv.append(val) l.append((OTHER, k, bv)) @@ -1644,11 +1722,14 @@ elif isinstance(propclass, hyperdb.Number): if type(v) != type([]): - v = v.split(',') + try : + v = v.split(',') + except AttributeError : + v = [v] l.append((OTHER, k, [float(val) for val in v])) filterspec = l - + # now, find all the nodes that are active and pass filtering matches = [] cldb = self.db.getclassdb(cn) @@ -1657,7 +1738,7 @@ # TODO: only full-scan once (use items()) for nodeid in self.getnodeids(cldb): node = self.db.getnode(cn, nodeid, cldb) - if node.has_key(self.db.RETIRED_FLAG): + if self.db.RETIRED_FLAG in node: continue # apply filter for t, k, v in filterspec: @@ -1687,12 +1768,10 @@ if not v: match = not nv else: - # othewise, make sure this node has each of the + # otherwise, make sure this node has each of the # required values - for want in v: - if want in nv: - match = 1 - break + expr = Expression(v) + if expr.evaluate(nv): match = 1 elif t == STRING: if nv is None: nv = '' @@ -1753,7 +1832,7 @@ try: v = item[prop] except KeyError: - if JPROPS.has_key(prop): + if prop in JPROPS: # force lookup of 
the special journal prop v = self.get(itemid, prop) else: @@ -1782,7 +1861,7 @@ key = link.orderprop() child = pt.propdict[key] if key!='id': - if not lcache.has_key(v): + if v not in lcache: # open the link class db if it's not already if lcldb is None: lcldb = self.db.getclassdb(lcn) @@ -1847,15 +1926,15 @@ may collide with the names of existing properties, or a ValueError is raised before any properties have been added. """ - for key in properties.keys(): - if self.properties.has_key(key): - raise ValueError, key + for key in properties: + if key in self.properties: + raise ValueError(key) self.properties.update(properties) def index(self, nodeid): """ Add (or refresh) the node to search indexes """ # find all the String properties that have indexme - for prop, propclass in self.getprops().items(): + for prop, propclass in self.getprops().iteritems(): if isinstance(propclass, hyperdb.String) and propclass.indexme: # index them under (classname, nodeid, property) try: @@ -1902,7 +1981,7 @@ Return the nodeid of the node imported. 
""" if self.db.journaltag is None: - raise hyperdb.DatabaseError, _('Database open read-only') + raise hyperdb.DatabaseError(_('Database open read-only')) properties = self.getprops() # make the new node's property map @@ -1934,9 +2013,7 @@ elif isinstance(prop, hyperdb.Interval): value = date.Interval(value) elif isinstance(prop, hyperdb.Password): - pwd = password.Password() - pwd.unpack(value) - value = pwd + value = password.Password(encrypted=value) d[propname] = value # get a new id if necessary @@ -1962,8 +2039,8 @@ date = date.get_tuple() if action == 'set': export_data = {} - for propname, value in params.items(): - if not properties.has_key(propname): + for propname, value in params.iteritems(): + if propname not in properties: # property no longer in the schema continue @@ -1983,42 +2060,10 @@ value = str(value) export_data[propname] = value params = export_data - l = [nodeid, date, user, action, params] - r.append(map(repr, l)) + r.append([repr(nodeid), repr(date), repr(user), + repr(action), repr(params)]) return r - def import_journals(self, entries): - """Import a class's journal. 
- - Uses setjournal() to set the journal for each item.""" - properties = self.getprops() - d = {} - for l in entries: - l = map(eval, l) - nodeid, jdate, user, action, params = l - r = d.setdefault(nodeid, []) - if action == 'set': - for propname, value in params.items(): - prop = properties[propname] - if value is None: - pass - elif isinstance(prop, hyperdb.Date): - if type(value) == type(()): - print _('WARNING: invalid date tuple %r')%(value,) - value = date.Date( "2000-1-1" ) - value = date.Date(value) - elif isinstance(prop, hyperdb.Interval): - value = date.Interval(value) - elif isinstance(prop, hyperdb.Password): - pwd = password.Password() - pwd.unpack(value) - value = pwd - params[propname] = value - r.append((nodeid, date.Date(jdate), user, action, params)) - - for nodeid, l in d.items(): - self.db.setjournal(self.classname, nodeid, l) - class FileClass(hyperdb.FileClass, Class): """This class defines a large chunk of data. To support this, it has a mandatory String property "content" which is typically saved off @@ -2032,9 +2077,9 @@ """The newly-created class automatically includes the "content" and "type" properties. """ - if not properties.has_key('content'): + if 'content' not in properties: properties['content'] = hyperdb.String(indexme='yes') - if not properties.has_key('type'): + if 'type' not in properties: properties['type'] = hyperdb.String() Class.__init__(self, db, classname, **properties) @@ -2072,7 +2117,7 @@ if propname == 'content': try: return self.db.getfile(self.classname, nodeid, None) - except IOError, (strerror): + except IOError, strerror: # XXX by catching this we don't see an error in the log. 
return 'ERROR reading file: %s%s\n%s\n%s'%( self.classname, nodeid, poss_msg, strerror) @@ -2088,8 +2133,8 @@ # create the oldvalues dict - fill in any missing values oldvalues = copy.deepcopy(self.db.getnode(self.classname, itemid)) - for name,prop in self.getprops(protected=0).items(): - if oldvalues.has_key(name): + for name, prop in self.getprops(protected=0).iteritems(): + if name in oldvalues: continue if isinstance(prop, hyperdb.Multilink): oldvalues[name] = [] @@ -2098,7 +2143,7 @@ # now remove the content property so it's not stored in the db content = None - if propvalues.has_key('content'): + if 'content' in propvalues: content = propvalues['content'] del propvalues['content'] @@ -2125,7 +2170,7 @@ Use the content-type property for the content property. """ # find all the String properties that have indexme - for prop, propclass in self.getprops().items(): + for prop, propclass in self.getprops().iteritems(): if prop == 'content' and propclass.indexme: mime_type = self.get(nodeid, 'type', self.default_mime_type) self.db.indexer.add_text((self.classname, nodeid, 'content'), @@ -2148,17 +2193,17 @@ dictionary attempts to specify any of these properties or a "creation" or "activity" property, a ValueError is raised. """ - if not properties.has_key('title'): + if 'title' not in properties: properties['title'] = hyperdb.String(indexme='yes') - if not properties.has_key('messages'): + if 'messages' not in properties: properties['messages'] = hyperdb.Multilink("msg") - if not properties.has_key('files'): + if 'files' not in properties: properties['files'] = hyperdb.Multilink("file") - if not properties.has_key('nosy'): + if 'nosy' not in properties: # note: journalling is turned off as it really just wastes # space. 
this behaviour may be overridden in an instance properties['nosy'] = hyperdb.Multilink("user", do_journal="no") - if not properties.has_key('superseder'): + if 'superseder' not in properties: properties['superseder'] = hyperdb.Multilink(classname) Class.__init__(self, db, classname, **properties) Modified: tracker/roundup-src/roundup/backends/back_mysql.py ============================================================================== --- tracker/roundup-src/roundup/backends/back_mysql.py (original) +++ tracker/roundup-src/roundup/backends/back_mysql.py Thu Aug 4 15:46:52 2011 @@ -564,6 +564,11 @@ raise class MysqlClass: + + def supports_subselects(self): + # TODO: AFAIK its version dependent for MySQL + return False + def _subselect(self, classname, multilink_table): ''' "I can't believe it's not a toy RDBMS" see, even toy RDBMSes like gadfly and sqlite can do sub-selects... @@ -572,6 +577,70 @@ s = ','.join([x[0] for x in self.db.sql_fetchall()]) return '_%s.id not in (%s)'%(classname, s) + def create_inner(self, **propvalues): + try: + return rdbms_common.Class.create_inner(self, **propvalues) + except MySQLdb.IntegrityError, e: + self._handle_integrity_error(e, propvalues) + + def set_inner(self, nodeid, **propvalues): + try: + return rdbms_common.Class.set_inner(self, nodeid, + **propvalues) + except MySQLdb.IntegrityError, e: + self._handle_integrity_error(e, propvalues) + + def _handle_integrity_error(self, e, propvalues): + ''' Handle a MySQL IntegrityError. + + If the error is recognized, then it may be converted into an + alternative exception. Otherwise, it is raised unchanged from + this function.''' + + # There are checks in create_inner/set_inner to see if a node + # is being created with the same key as an existing node. + # But, there is a race condition -- we may pass those checks, + # only to find out that a parallel session has created the + # node by by the time we actually issue the SQL command to + # create the node. 
Fortunately, MySQL gives us a unique error + # code for this situation, so we can detect it here and handle + # it appropriately. + # + # The details of the race condition are as follows, where + # "X" is a classname, and the term "thread" is meant to + # refer generically to both threads and processes: + # + # Thread A Thread B + # -------- -------- + # read table for X + # create new X object + # commit + # create new X object + # + # In Thread B, the check in create_inner does not notice that + # the new X object is a duplicate of that committed in Thread + # A because MySQL's default "consistent nonlocking read" + # behavior means that Thread B sees a snapshot of the database + # at the point at which its transaction began -- which was + # before Thread A created the object. However, the attempt + # to *write* to the table for X, creating a duplicate entry, + # triggers an error at the point of the write. + # + # If both A and B's transaction begins with creating a new X + # object, then this bug cannot occur because creating the + # object requires getting a new ID, and newid() locks the id + # table until the transaction is committed or rolledback. So, + # B will block until A's commit is complete, and will not + # actually get its snapshot until A's transaction completes. + # But, if the transaction has begun prior to calling newid, + # then the snapshot has already been established. + if e[0] == ER.DUP_ENTRY: + key = propvalues[self.key] + raise ValueError, 'node with key "%s" exists' % key + # We don't know what this exception is; reraise it. 
+ raise + + class Class(MysqlClass, rdbms_common.Class): pass class IssueClass(MysqlClass, rdbms_common.IssueClass): Modified: tracker/roundup-src/roundup/backends/back_postgresql.py ============================================================================== --- tracker/roundup-src/roundup/backends/back_postgresql.py (original) +++ tracker/roundup-src/roundup/backends/back_postgresql.py Thu Aug 4 15:46:52 2011 @@ -27,38 +27,46 @@ def connection_dict(config, dbnamestr=None): ''' read_default_group is MySQL-specific, ignore it ''' d = rdbms_common.connection_dict(config, dbnamestr) - if d.has_key('read_default_group'): + if 'read_default_group' in d: del d['read_default_group'] - if d.has_key('read_default_file'): + if 'read_default_file' in d: del d['read_default_file'] return d def db_create(config): """Clear all database contents and drop database itself""" - command = "CREATE DATABASE %s WITH ENCODING='UNICODE'"%config.RDBMS_NAME - logging.getLogger('hyperdb').info(command) + command = "CREATE DATABASE \"%s\" WITH ENCODING='UNICODE'"%config.RDBMS_NAME + if config.RDBMS_TEMPLATE : + command = command + " TEMPLATE=%s" % config.RDBMS_TEMPLATE + logging.getLogger('roundup.hyperdb').info(command) db_command(config, command) def db_nuke(config, fail_ok=0): """Clear all database contents and drop database itself""" - command = 'DROP DATABASE %s'% config.RDBMS_NAME - logging.getLogger('hyperdb').info(command) + command = 'DROP DATABASE "%s"'% config.RDBMS_NAME + logging.getLogger('roundup.hyperdb').info(command) db_command(config, command) if os.path.exists(config.DATABASE): shutil.rmtree(config.DATABASE) -def db_command(config, command): +def db_command(config, command, database='postgres'): '''Perform some sort of database-level command. Retry 10 times if we fail by conflicting with another user. + + Since PostgreSQL version 8.1 there is a database "postgres", + before "template1" seems to habe been used, so we fall back to it. + Compare to issue2550543. 
''' template1 = connection_dict(config) - template1['database'] = 'template1' + template1['database'] = database try: conn = psycopg.connect(**template1) except psycopg.OperationalError, message: - raise hyperdb.DatabaseError, message + if str(message).find('database "postgres" does not exist') >= 0: + return db_command(config, command, database='template1') + raise hyperdb.DatabaseError(message) conn.set_isolation_level(0) cursor = conn.cursor() @@ -68,7 +76,7 @@ return finally: conn.close() - raise RuntimeError, '10 attempts to create database failed' + raise RuntimeError('10 attempts to create database failed') def pg_command(cursor, command): '''Execute the postgresql command, which may be blocked by some other @@ -81,7 +89,7 @@ except psycopg.ProgrammingError, err: response = str(err).split('\n')[0] if response.find('FATAL') != -1: - raise RuntimeError, response + raise RuntimeError(response) else: msgs = [ 'is being accessed by other users', @@ -94,7 +102,7 @@ if can_retry: time.sleep(1) return 0 - raise RuntimeError, response + raise RuntimeError(response) return 1 def db_exists(config): @@ -131,11 +139,12 @@ def sql_open_connection(self): db = connection_dict(self.config, 'database') - logging.getLogger('hyperdb').info('open database %r'%db['database']) + logging.getLogger('roundup.hyperdb').info( + 'open database %r'%db['database']) try: conn = psycopg.connect(**db) except psycopg.OperationalError, message: - raise hyperdb.DatabaseError, message + raise hyperdb.DatabaseError(message) cursor = conn.cursor() @@ -209,7 +218,7 @@ def add_actor_column(self): # update existing tables to have the new actor column tables = self.database_schema['tables'] - for name in tables.keys(): + for name in tables: self.sql('ALTER TABLE _%s add __actor VARCHAR(255)'%name) def __repr__(self): @@ -218,7 +227,7 @@ def sql_commit(self, fail_ok=False): ''' Actually commit to the database. 
''' - logging.getLogger('hyperdb').info('commit') + logging.getLogger('roundup.hyperdb').info('commit') try: self.conn.commit() @@ -226,7 +235,8 @@ # we've been instructed that this commit is allowed to fail if fail_ok and str(message).endswith('could not serialize ' 'access due to concurrent update'): - logging.getLogger('hyperdb').info('commit FAILED, but fail_ok') + logging.getLogger('roundup.hyperdb').info( + 'commit FAILED, but fail_ok') else: raise @@ -271,7 +281,7 @@ rdbms_common.Database.clear(self) # reset the sequences - for cn in self.classes.keys(): + for cn in self.classes: self.cursor.execute('DROP SEQUENCE _%s_ids'%cn) self.cursor.execute('CREATE SEQUENCE _%s_ids'%cn) Modified: tracker/roundup-src/roundup/backends/back_sqlite.py ============================================================================== --- tracker/roundup-src/roundup/backends/back_sqlite.py (original) +++ tracker/roundup-src/roundup/backends/back_sqlite.py Thu Aug 4 15:46:52 2011 @@ -75,11 +75,11 @@ def sqlite_busy_handler(self, data, table, count): """invoked whenever SQLite tries to access a database that is locked""" + now = time.time() if count == 1: - # use a 30 second timeout (extraordinarily generous) - # for handling locked database - self._busy_handler_endtime = time.time() + 30 - elif time.time() > self._busy_handler_endtime: + # Timeout for handling locked database (default 30s) + self._busy_handler_endtime = now + self.config.RDBMS_SQLITE_TIMEOUT + elif now > self._busy_handler_endtime: # timeout expired - no more retries return 0 # sleep adaptively as retry count grows, @@ -99,14 +99,14 @@ os.makedirs(self.config.DATABASE) db = os.path.join(self.config.DATABASE, 'db') - logging.getLogger('hyperdb').info('open database %r'%db) - # set a 30 second timeout (extraordinarily generous) for handling - # locked database + logging.getLogger('roundup.hyperdb').info('open database %r'%db) + # set timeout (30 second default is extraordinarily generous) + # for handling locked 
database if sqlite_version == 1: conn = sqlite.connect(db=db) conn.db.sqlite_busy_handler(self.sqlite_busy_handler) else: - conn = sqlite.connect(db, timeout=30) + conn = sqlite.connect(db, timeout=self.config.RDBMS_SQLITE_TIMEOUT) conn.row_factory = sqlite.Row # pysqlite2 / sqlite3 want us to store Unicode in the db but @@ -160,7 +160,7 @@ # update existing tables to have the new actor column tables = self.database_schema['tables'] for classname, spec in self.classes.items(): - if tables.has_key(classname): + if classname in tables: dbspec = tables[classname] self.update_class(spec, dbspec, force=1, adding_v2=1) # we've updated - don't try again @@ -179,7 +179,6 @@ SQLite doesn't have ALTER TABLE, so we have to copy and regenerate the tables with the new schema. """ - new_has = spec.properties.has_key new_spec = spec.schema() new_spec[1].sort() old_spec[1].sort() @@ -187,20 +186,20 @@ # no changes return 0 - logging.getLogger('hyperdb').info('update_class %s'%spec.classname) + logging.getLogger('roundup.hyperdb').info( + 'update_class %s'%spec.classname) # detect multilinks that have been removed, and drop their table old_has = {} for name, prop in old_spec[1]: old_has[name] = 1 - if new_has(name) or not isinstance(prop, hyperdb.Multilink): + if name in spec.properties or not isinstance(prop, hyperdb.Multilink): continue # it's a multilink, and it's been removed - drop the old # table. First drop indexes. 
self.drop_multilink_table_indexes(spec.classname, name) sql = 'drop table %s_%s'%(spec.classname, prop) self.sql(sql) - old_has = old_has.has_key # now figure how we populate the new table if adding_v2: @@ -211,7 +210,7 @@ for propname,x in new_spec[1]: prop = properties[propname] if isinstance(prop, hyperdb.Multilink): - if not old_has(propname): + if propname not in old_has: # we need to create the new table self.create_multilink_table(spec, propname) elif force: @@ -232,7 +231,7 @@ (%s, %s)"""%(tn, self.arg, self.arg) for linkid, nodeid in rows: self.sql(sql, (int(linkid), int(nodeid))) - elif old_has(propname): + elif propname in old_has: # we copy this col over from the old table fetch.append('_'+propname) @@ -263,7 +262,7 @@ elif isinstance(prop, hyperdb.Interval): inscols.append('_'+propname) inscols.append('__'+propname+'_int__') - elif old_has(propname): + elif propname in old_has: # we copy this col over from the old table inscols.append('_'+propname) @@ -283,7 +282,7 @@ v = hyperdb.Interval(entry[name]).as_seconds() except IndexError: v = None - elif entry.has_key(name): + elif name in entry: v = hyperdb.Interval(entry[name]).as_seconds() else: v = None @@ -292,7 +291,7 @@ v = entry[name] except IndexError: v = None - elif (sqlite_version == 1 and entry.has_key(name)): + elif (sqlite_version == 1 and name in entry): v = entry[name] else: v = None @@ -397,8 +396,8 @@ """ If there's NO matches to a fetch, sqlite returns NULL instead of nothing """ - return filter(None, rdbms_common.Class.filter(self, search_matches, - filterspec, sort=sort, group=group)) + return [f for f in rdbms_common.Class.filter(self, search_matches, + filterspec, sort=sort, group=group) if f] class Class(sqliteClass, rdbms_common.Class): pass Modified: tracker/roundup-src/roundup/backends/blobfiles.py ============================================================================== --- tracker/roundup-src/roundup/backends/blobfiles.py (original) +++ 
tracker/roundup-src/roundup/backends/blobfiles.py Thu Aug 4 15:46:52 2011 @@ -304,6 +304,10 @@ # file just ain't there raise IOError('content file for %s not found'%filename) + def filesize(self, classname, nodeid, property=None, create=0): + filename = self.filename(classname, nodeid, property, create) + return os.path.getsize(filename) + def storefile(self, classname, nodeid, property, content): """Store the content of the file in the database. The property may be None, in which case the filename does not indicate which property Modified: tracker/roundup-src/roundup/backends/indexer_common.py ============================================================================== --- tracker/roundup-src/roundup/backends/indexer_common.py (original) +++ tracker/roundup-src/roundup/backends/indexer_common.py Thu Aug 4 15:46:52 2011 @@ -36,8 +36,6 @@ def search(self, search_terms, klass, ignore={}): """Display search results looking for [search, terms] associated with the hyperdb Class "klass". Ignore hits on {class: property}. - - "dre" is a helper, not an argument. 
""" # do the index lookup hits = self.getHits(search_terms, klass) @@ -45,7 +43,7 @@ return {} designator_propname = {} - for nm, propclass in klass.getprops().items(): + for nm, propclass in klass.getprops().iteritems(): if _isLink(propclass): designator_propname.setdefault(propclass.classname, []).append(nm) @@ -54,7 +52,7 @@ # and files nodeids = {} # this is the answer propspec = {} # used to do the klass.find - for l in designator_propname.values(): + for l in designator_propname.itervalues(): for propname in l: propspec[propname] = {} # used as a set (value doesn't matter) @@ -63,7 +61,7 @@ # skip this result if we don't care about this class/property classname = entry[0] property = entry[2] - if ignore.has_key((classname, property)): + if (classname, property) in ignore: continue # if it's a property on klass, it's easy @@ -71,12 +69,12 @@ # backends as that can cause problems down the track) nodeid = str(entry[1]) if classname == klass.classname: - if not nodeids.has_key(nodeid): + if nodeid not in nodeids: nodeids[nodeid] = {} continue # make sure the class is a linked one, otherwise ignore - if not designator_propname.has_key(classname): + if classname not in designator_propname: continue # it's a linked class - set up to do the klass.find @@ -84,7 +82,7 @@ propspec[linkprop][nodeid] = 1 # retain only the meaningful entries - for propname, idset in propspec.items(): + for propname, idset in list(propspec.items()): if not idset: del propspec[propname] @@ -97,16 +95,16 @@ nodeids[resid] = {} node_dict = nodeids[resid] # now figure out where it came from - for linkprop in propspec.keys(): + for linkprop in propspec: v = klass.get(resid, linkprop) # the link might be a Link so deal with a single result or None if isinstance(propdefs[linkprop], hyperdb.Link): if v is None: continue v = [v] for nodeid in v: - if propspec[linkprop].has_key(nodeid): + if nodeid in propspec[linkprop]: # OK, this node[propname] has a winner - if not node_dict.has_key(linkprop): + 
if linkprop not in node_dict: node_dict[linkprop] = [nodeid] else: node_dict[linkprop].append(nodeid) Modified: tracker/roundup-src/roundup/backends/indexer_dbm.py ============================================================================== --- tracker/roundup-src/roundup/backends/indexer_dbm.py (original) +++ tracker/roundup-src/roundup/backends/indexer_dbm.py Thu Aug 4 15:46:52 2011 @@ -81,7 +81,7 @@ self.load_index() # remove old entries for this identifier - if self.files.has_key(identifier): + if identifier in self.files: self.purge_entry(identifier) # split into words @@ -99,15 +99,15 @@ for word in words: if self.is_stopword(word): continue - if filedict.has_key(word): + if word in filedict: filedict[word] = filedict[word]+1 else: filedict[word] = 1 # now add to the totals - for word in filedict.keys(): + for word in filedict: # each word has a dict of {identifier: count} - if self.words.has_key(word): + if word in self.words: entry = self.words[word] else: # new word @@ -162,18 +162,18 @@ return {} if hits is None: hits = {} - for k in entry.keys(): - if not self.fileids.has_key(k): - raise ValueError, 'Index is corrupted: re-generate it' + for k in entry: + if k not in self.fileids: + raise ValueError('Index is corrupted: re-generate it') hits[k] = self.fileids[k] else: # Eliminate hits for every non-match - for fileid in hits.keys(): - if not entry.has_key(fileid): + for fileid in list(hits): + if fileid not in entry: del hits[fileid] if hits is None: return {} - return hits.values() + return list(hits.values()) segments = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ#_-!" 
def load_index(self, reload=0, wordlist=None): @@ -205,7 +205,7 @@ dbslice = marshal.loads(pickle_str) if dbslice.get('WORDS'): # if it has some words, add them - for word, entry in dbslice['WORDS'].items(): + for word, entry in dbslice['WORDS'].iteritems(): db['WORDS'][word] = entry if dbslice.get('FILES'): # if it has some files, add them @@ -241,7 +241,7 @@ segdicts = {} # Need batch of empty dicts for segment in letters: segdicts[segment] = {} - for word, entry in self.words.items(): # Split into segment dicts + for word, entry in self.words.iteritems(): # Split into segment dicts initchar = word[0].upper() segdicts[initchar][word] = entry @@ -262,7 +262,7 @@ ''' self.load_index() - if not self.files.has_key(identifier): + if identifier not in self.files: return file_index = self.files[identifier][0] @@ -270,8 +270,8 @@ del self.fileids[file_index] # The much harder part, cleanup the word index - for key, occurs in self.words.items(): - if occurs.has_key(file_index): + for key, occurs in self.words.iteritems(): + if file_index in occurs: del occurs[file_index] # save needed Modified: tracker/roundup-src/roundup/backends/indexer_rdbms.py ============================================================================== --- tracker/roundup-src/roundup/backends/indexer_rdbms.py (original) +++ tracker/roundup-src/roundup/backends/indexer_rdbms.py Thu Aug 4 15:46:52 2011 @@ -64,10 +64,12 @@ self.db.cursor.execute(sql, (id, )) # ok, find all the unique words in the text - text = unicode(text, "utf-8", "replace").upper() + if not isinstance(text, unicode): + text = unicode(text, "utf-8", "replace") + text = text.upper() wordlist = [w.encode("utf-8") - for w in re.findall(r'(?u)\b\w{%d,%d}\b' - % (self.minlength, self.maxlength), text)] + for w in re.findall(r'(?u)\b\w{%d,%d}\b' + % (self.minlength, self.maxlength), text)] words = set() for word in wordlist: if self.is_stopword(word): continue @@ -127,7 +129,7 @@ sql = sql%(' '.join(join_list), self.db.arg, ' 
'.join(match_list)) self.db.cursor.execute(sql, l) - r = map(lambda x: x[0], self.db.cursor.fetchall()) + r = [x[0] for x in self.db.cursor.fetchall()] if not r: return [] Modified: tracker/roundup-src/roundup/backends/indexer_xapian.py ============================================================================== --- tracker/roundup-src/roundup/backends/indexer_xapian.py (original) +++ tracker/roundup-src/roundup/backends/indexer_xapian.py Thu Aug 4 15:46:52 2011 @@ -24,7 +24,6 @@ '''Save the changes to the index.''' if not self.transaction_active: return - # XXX: Xapian databases don't actually implement transactions yet database = self._get_database() database.commit_transaction() self.transaction_active = False @@ -36,7 +35,6 @@ def rollback(self): if not self.transaction_active: return - # XXX: Xapian databases don't actually implement transactions yet database = self._get_database() database.cancel_transaction() self.transaction_active = False @@ -59,7 +57,9 @@ # open the database and start a transaction if needed database = self._get_database() - # XXX: Xapian databases don't actually implement transactions yet + + # XXX: Xapian now supports transactions, + # but there is a call to save_index() missing. #if not self.transaction_active: #database.begin_transaction() #self.transaction_active = True @@ -72,21 +72,10 @@ # indexed so we know what we're matching when we get results identifier = '%s:%s:%s'%identifier - # see if the id is in the database - enquire = xapian.Enquire(database) - query = xapian.Query(xapian.Query.OP_AND, [identifier]) - enquire.set_query(query) - matches = enquire.get_mset(0, 10) - if matches.size(): # would it killya to implement __len__()?? 
- b = matches.begin() - docid = b.get_docid() - else: - docid = None - # create the new document doc = xapian.Document() doc.set_data(identifier) - doc.add_posting(identifier, 0) + doc.add_term(identifier, 0) for match in re.finditer(r'\b\w{%d,%d}\b' % (self.minlength, self.maxlength), @@ -96,10 +85,8 @@ continue term = stemmer(word) doc.add_posting(term, match.start(0)) - if docid: - database.replace_document(docid, doc) - else: - database.add_document(doc) + + database.replace_document(identifier, doc) def find(self, wordlist): '''look up all the words in the wordlist. @@ -123,6 +110,6 @@ enquire.set_query(query) matches = enquire.get_mset(0, 10) - return [tuple(m[xapian.MSET_DOCUMENT].get_data().split(':')) + return [tuple(m.document.get_data().split(':')) for m in matches] Modified: tracker/roundup-src/roundup/backends/locking.py ============================================================================== --- tracker/roundup-src/roundup/backends/locking.py (original) +++ tracker/roundup-src/roundup/backends/locking.py Thu Aug 4 15:46:52 2011 @@ -28,12 +28,11 @@ ''' __docformat__ = 'restructuredtext' -import portalocker +from roundup.backends import portalocker def acquire_lock(path, block=1): '''Acquire a lock for the given path ''' - import portalocker file = open(path, 'w') if block: portalocker.lock(file, portalocker.LOCK_EX) Modified: tracker/roundup-src/roundup/backends/portalocker.py ============================================================================== --- tracker/roundup-src/roundup/backends/portalocker.py (original) +++ tracker/roundup-src/roundup/backends/portalocker.py Thu Aug 4 15:46:52 2011 @@ -136,10 +136,9 @@ if __name__ == '__main__': from time import time, strftime, localtime import sys - import portalocker log = open('log.txt', "a+") - portalocker.lock(log, portalocker.LOCK_EX) + lock(log, LOCK_EX) timestamp = strftime("%m/%d/%Y %H:%M:%S\n", localtime(time())) log.write( timestamp ) Modified: 
tracker/roundup-src/roundup/backends/rdbms_common.py ============================================================================== --- tracker/roundup-src/roundup/backends/rdbms_common.py (original) +++ tracker/roundup-src/roundup/backends/rdbms_common.py Thu Aug 4 15:46:52 2011 @@ -52,7 +52,7 @@ __docformat__ = 'restructuredtext' # standard python modules -import sys, os, time, re, errno, weakref, copy, logging +import sys, os, time, re, errno, weakref, copy, logging, datetime # roundup modules from roundup import hyperdb, date, password, roundupdb, security, support @@ -62,15 +62,19 @@ from roundup.support import reversed from roundup.i18n import _ + # support -from blobfiles import FileStorage +from roundup.backends.blobfiles import FileStorage try: - from indexer_xapian import Indexer + from roundup.backends.indexer_xapian import Indexer except ImportError: - from indexer_rdbms import Indexer -from sessions_rdbms import Sessions, OneTimeKeys + from roundup.backends.indexer_rdbms import Indexer +from roundup.backends.sessions_rdbms import Sessions, OneTimeKeys from roundup.date import Range +from roundup.backends.back_anydbm import compile_expression + + # dummy value meaning "argument not passed" _marker = [] @@ -87,6 +91,13 @@ # assume it's a number returned from the db API return int(value) +def date_to_hyperdb_value(d): + """ convert date d to a roundup date """ + if isinstance (d, datetime.datetime): + return date.Date(d) + return date.Date (str(d).replace(' ', '.')) + + def connection_dict(config, dbnamestr=None): """ Used by Postgresql and MySQL to detemine the keyword args for opening the database connection.""" @@ -100,6 +111,54 @@ d[name] = config[cvar] return d + +class IdListOptimizer: + """ To prevent flooding the SQL parser of the underlaying + db engine with "x IN (1, 2, 3, ..., <large number>)" collapses + these cases to "x BETWEEN 1 AND <large number>". 
+ """ + + def __init__(self): + self.ranges = [] + self.singles = [] + + def append(self, nid): + """ Invariant: nid are ordered ascending """ + if self.ranges: + last = self.ranges[-1] + if last[1] == nid-1: + last[1] = nid + return + if self.singles: + last = self.singles[-1] + if last == nid-1: + self.singles.pop() + self.ranges.append([last, nid]) + return + self.singles.append(nid) + + def where(self, field, placeholder): + ranges = self.ranges + singles = self.singles + + if not singles and not ranges: return "(1=0)", [] + + if ranges: + between = '%s BETWEEN %s AND %s' % ( + field, placeholder, placeholder) + stmnt = [between] * len(ranges) + else: + stmnt = [] + if singles: + stmnt.append('%s in (%s)' % ( + field, ','.join([placeholder]*len(singles)))) + + return '(%s)' % ' OR '.join(stmnt), sum(ranges, []) + singles + + def __str__(self): + return "ranges: %r / singles: %r" % (self.ranges, self.singles) + + class Database(FileStorage, hyperdb.Database, roundupdb.Database): """ Wrapper around an SQL database that presents a hyperdb interface. @@ -123,8 +182,7 @@ # keep a cache of the N most recently retrieved rows of any kind # (classname, nodeid) = row self.cache_size = config.RDBMS_CACHE_SIZE - self.cache = {} - self.cache_lru = [] + self.clearCache() self.stats = {'cache_hits': 0, 'cache_misses': 0, 'get_items': 0, 'filtering': 0} @@ -151,14 +209,16 @@ """ raise NotImplemented - def sql(self, sql, args=None): + def sql(self, sql, args=None, cursor=None): """ Execute the sql with the optional args. """ self.log_debug('SQL %r %r'%(sql, args)) + if not cursor: + cursor = self.cursor if args: - self.cursor.execute(sql, args) + cursor.execute(sql, args) else: - self.cursor.execute(sql) + cursor.execute(sql) def sql_fetchone(self): """ Fetch a single row. If there's nothing to fetch, return None. 
@@ -170,6 +230,14 @@ """ return self.cursor.fetchall() + def sql_fetchiter(self): + """ Fetch all row as a generator + """ + while True: + row = self.cursor.fetchone() + if not row: break + yield row + def sql_stringquote(self, value): """ Quote the string so it's safe to put in the 'sql quotes' """ @@ -208,8 +276,8 @@ # handle changes in the schema tables = self.database_schema['tables'] - for classname, spec in self.classes.items(): - if tables.has_key(classname): + for classname, spec in self.classes.iteritems(): + if classname in tables: dbspec = tables[classname] if self.update_class(spec, dbspec): tables[classname] = spec.schema() @@ -219,8 +287,8 @@ tables[classname] = spec.schema() save = 1 - for classname, spec in tables.items(): - if not self.classes.has_key(classname): + for classname, spec in list(tables.items()): + if classname not in self.classes: self.drop_class(classname, tables[classname]) del tables[classname] save = 1 @@ -298,7 +366,7 @@ def fix_version_4_tables(self): # note this is an explicit call now c = self.cursor - for cn, klass in self.classes.items(): + for cn, klass in self.classes.iteritems(): c.execute('select id from _%s where __retired__<>0'%(cn,)) for (id,) in c.fetchall(): c.execute('update _%s set __retired__=%s where id=%s'%(cn, @@ -311,7 +379,7 @@ """Get current journal table contents, drop the table and re-create""" c = self.cursor cols = ','.join('nodeid date tag action params'.split()) - for klass in self.classes.values(): + for klass in self.classes.itervalues(): # slurp and drop sql = 'select %s from %s__journal order by date'%(cols, klass.classname) @@ -333,9 +401,9 @@ """Get current Class tables that contain String properties, and convert the VARCHAR columns to TEXT""" c = self.cursor - for klass in self.classes.values(): + for klass in self.classes.itervalues(): # slurp and drop - cols, mls = self.determine_columns(klass.properties.items()) + cols, mls = self.determine_columns(list(klass.properties.iteritems())) scols = 
','.join([i[0] for i in cols]) sql = 'select id,%s from _%s'%(scols, klass.classname) c.execute(sql) @@ -365,7 +433,7 @@ if classname: classes = [self.getclass(classname)] else: - classes = self.classes.values() + classes = list(self.classes.itervalues()) for klass in classes: if show_progress: for nodeid in support.Progress('Reindex %s'%klass.classname, @@ -396,7 +464,7 @@ if issubclass(propclass, k): return v - raise ValueError, '%r is not a hyperdb property class' % propclass + raise ValueError('%r is not a hyperdb property class' % propclass) def determine_columns(self, properties): """ Figure the column names and multilink properties from the spec @@ -418,7 +486,7 @@ continue if isinstance(prop, type('')): - raise ValueError, "string property spec!" + raise ValueError("string property spec!") #and prop.find('Multilink') != -1: #mls.append(col) @@ -438,7 +506,6 @@ If 'force' is true, update the database anyway. """ - new_has = spec.properties.has_key new_spec = spec.schema() new_spec[1].sort() old_spec[1].sort() @@ -446,7 +513,10 @@ # no changes return 0 - logger = logging.getLogger('hyperdb') + if not self.config.RDBMS_ALLOW_ALTER: + raise DatabaseError(_('ALTER operation disallowed: %r -> %r.'%(old_spec, new_spec))) + + logger = logging.getLogger('roundup.hyperdb') logger.info('update_class %s'%spec.classname) logger.debug('old_spec %r'%(old_spec,)) @@ -464,7 +534,7 @@ old_has = {} for name, prop in old_spec[1]: old_has[name] = 1 - if new_has(name): + if name in spec.properties: continue if prop.find('Multilink to') != -1: @@ -483,17 +553,16 @@ sql = 'alter table _%s drop column _%s'%(spec.classname, name) self.sql(sql) - old_has = old_has.has_key # if we didn't remove the key prop just then, but the key prop has # changed, we still need to remove the old index - if keyprop_changes.has_key('remove'): + if 'remove' in keyprop_changes: self.drop_class_table_key_index(spec.classname, keyprop_changes['remove']) # add new columns for propname, prop in new_spec[1]: 
- if old_has(propname): + if propname in old_has: continue prop = spec.properties[propname] if isinstance(prop, Multilink): @@ -518,7 +587,7 @@ # if we didn't add the key prop just then, but the key prop has # changed, we still need to add the new index - if keyprop_changes.has_key('add'): + if 'add' in keyprop_changes: self.create_class_table_key_index(spec.classname, keyprop_changes['add']) @@ -528,7 +597,7 @@ """Figure out the columns from the spec and also add internal columns """ - cols, mls = self.determine_columns(spec.properties.items()) + cols, mls = self.determine_columns(list(spec.properties.iteritems())) # add on our special columns cols.append(('id', 'INTEGER PRIMARY KEY')) @@ -671,6 +740,10 @@ def create_class(self, spec): """ Create a database table according to the given spec. """ + + if not self.config.RDBMS_ALLOW_CREATE: + raise DatabaseError(_('CREATE operation disallowed: "%s".'%spec.classname)) + cols, mls = self.create_class_table(spec) self.create_journal_table(spec) @@ -683,6 +756,10 @@ Drop the journal and multilink tables too. """ + + if not self.config.RDBMS_ALLOW_DROP: + raise DatabaseError(_('DROP operation disallowed: "%s".'%cn)) + properties = spec[1] # figure the multilinks mls = [] @@ -716,16 +793,16 @@ def __getattr__(self, classname): """ A convenient way of calling self.getclass(classname). """ - if self.classes.has_key(classname): + if classname in self.classes: return self.classes[classname] - raise AttributeError, classname + raise AttributeError(classname) def addclass(self, cl): """ Add a Class to the hyperdatabase. 
""" cn = cl.classname - if self.classes.has_key(cn): - raise ValueError, cn + if cn in self.classes: + raise ValueError(cn) self.classes[cn] = cl # add default Edit and View permissions @@ -735,13 +812,13 @@ description="User is allowed to edit "+cn) self.security.addPermission(name="View", klass=cn, description="User is allowed to access "+cn) + self.security.addPermission(name="Retire", klass=cn, + description="User is allowed to retire "+cn) def getclasses(self): """ Return a list of the names of all existing classes. """ - l = self.classes.keys() - l.sort() - return l + return sorted(self.classes) def getclass(self, classname): """Get the Class object representing a particular class. @@ -751,7 +828,7 @@ try: return self.classes[classname] except KeyError: - raise KeyError, 'There is no class called "%s"'%classname + raise KeyError('There is no class called "%s"'%classname) def clear(self): """Delete all database contents. @@ -759,8 +836,8 @@ Note: I don't commit here, which is different behaviour to the "nuke from orbit" behaviour in the dbs. """ - logging.getLogger('hyperdb').info('clear') - for cn in self.classes.keys(): + logging.getLogger('roundup.hyperdb').info('clear') + for cn in self.classes: sql = 'delete from _%s'%cn self.sql(sql) @@ -790,7 +867,22 @@ if issubclass(propklass, k): return v - raise ValueError, '%r is not a hyperdb property class' % propklass + raise ValueError('%r is not a hyperdb property class' % propklass) + + def _cache_del(self, key): + del self.cache[key] + self.cache_lru.remove(key) + + def _cache_refresh(self, key): + self.cache_lru.remove(key) + self.cache_lru.insert(0, key) + + def _cache_save(self, key, node): + self.cache[key] = node + # update the LRU + self.cache_lru.insert(0, key) + if len(self.cache_lru) > self.cache_size: + del self.cache[self.cache_lru.pop()] def addnode(self, classname, nodeid, node): """ Add the specified node to its class's db. 
@@ -800,11 +892,11 @@ # determine the column definitions and multilink tables cl = self.classes[classname] - cols, mls = self.determine_columns(cl.properties.items()) + cols, mls = self.determine_columns(list(cl.properties.iteritems())) # we'll be supplied these props if we're doing an import values = node.copy() - if not values.has_key('creator'): + if 'creator' not in values: # add in the "calculated" properties (dupe so we don't affect # calling code's node assumptions) values['creation'] = values['activity'] = date.Date() @@ -815,8 +907,8 @@ del props['id'] # default the non-multilink columns - for col, prop in props.items(): - if not values.has_key(col): + for col, prop in props.iteritems(): + if col not in values: if isinstance(prop, Multilink): values[col] = [] else: @@ -824,9 +916,8 @@ # clear this node out of the cache if it's in there key = (classname, nodeid) - if self.cache.has_key(key): - del self.cache[key] - self.cache_lru.remove(key) + if key in self.cache: + self._cache_del(key) # figure the values to insert vals = [] @@ -874,9 +965,8 @@ # clear this node out of the cache if it's in there key = (classname, nodeid) - if self.cache.has_key(key): - del self.cache[key] - self.cache_lru.remove(key) + if key in self.cache: + self._cache_del(key) cl = self.classes[classname] props = cl.getprops() @@ -884,7 +974,7 @@ cols = [] mls = [] # add the multilinks separately - for col in values.keys(): + for col in values: prop = props[col] if isinstance(prop, Multilink): mls.append(col) @@ -953,7 +1043,7 @@ self.sql(sql, (entry, nodeid)) # we have multilink changes to apply - for col, (add, remove) in multilink_changes.items(): + for col, (add, remove) in multilink_changes.iteritems(): tn = '%s_%s'%(classname, col) if add: sql = 'insert into %s (nodeid, linkid) values (%s,%s)'%(tn, @@ -970,7 +1060,7 @@ sql_to_hyperdb_value = { hyperdb.String : str, - hyperdb.Date : lambda x:date.Date(str(x).replace(' ', '.')), + hyperdb.Date : date_to_hyperdb_value, # 
hyperdb.Link : int, # XXX numeric ids hyperdb.Link : str, hyperdb.Interval : date.Interval, @@ -990,20 +1080,47 @@ if issubclass(propklass, k): return v - raise ValueError, '%r is not a hyperdb property class' % propklass + raise ValueError('%r is not a hyperdb property class' % propklass) - def getnode(self, classname, nodeid): + def _materialize_multilink(self, classname, nodeid, node, propname): + """ evaluation of single Multilink (lazy eval may have skipped this) + """ + if propname not in node: + sql = 'select linkid from %s_%s where nodeid=%s'%(classname, + propname, self.arg) + self.sql(sql, (nodeid,)) + # extract the first column from the result + # XXX numeric ids + items = [int(x[0]) for x in self.cursor.fetchall()] + items.sort () + node[propname] = [str(x) for x in items] + + def _materialize_multilinks(self, classname, nodeid, node, props=None): + """ get all Multilinks of a node (lazy eval may have skipped this) + """ + cl = self.classes[classname] + props = props or [pn for (pn, p) in cl.properties.iteritems() + if isinstance(p, Multilink)] + for propname in props: + if propname not in node: + self._materialize_multilink(classname, nodeid, node, propname) + + def getnode(self, classname, nodeid, fetch_multilinks=True): """ Get a node from the database. + For optimisation optionally we don't fetch multilinks + (lazy Multilinks). + But for internal database operations we need them. 
""" # see if we have this node cached key = (classname, nodeid) - if self.cache.has_key(key): + if key in self.cache: # push us back to the top of the LRU - self.cache_lru.remove(key) - self.cache_lru.insert(0, key) + self._cache_refresh(key) if __debug__: self.stats['cache_hits'] += 1 # return the cached information + if fetch_multilinks: + self._materialize_multilinks(classname, nodeid, self.cache[key]) return self.cache[key] if __debug__: @@ -1012,7 +1129,7 @@ # figure the columns we're fetching cl = self.classes[classname] - cols, mls = self.determine_columns(cl.properties.items()) + cols, mls = self.determine_columns(list(cl.properties.iteritems())) scols = ','.join([col for col,dt in cols]) # perform the basic property fetch @@ -1021,7 +1138,7 @@ values = self.sql_fetchone() if values is None: - raise IndexError, 'no such %s node %s'%(classname, nodeid) + raise IndexError('no such %s node %s'%(classname, nodeid)) # make up the node node = {} @@ -1037,26 +1154,12 @@ value = self.to_hyperdb_value(props[name].__class__)(value) node[name] = value - - # now the multilinks - for col in mls: - # get the link ids - sql = 'select linkid from %s_%s where nodeid=%s'%(classname, col, - self.arg) - self.sql(sql, (nodeid,)) - # extract the first column from the result - # XXX numeric ids - items = [int(x[0]) for x in self.cursor.fetchall()] - items.sort () - node[col] = [str(x) for x in items] + if fetch_multilinks and mls: + self._materialize_multilinks(classname, nodeid, node, mls) # save off in the cache key = (classname, nodeid) - self.cache[key] = node - # update the LRU - self.cache_lru.insert(0, key) - if len(self.cache_lru) > self.cache_size: - del self.cache[self.cache_lru.pop()] + self._cache_save(key, node) if __debug__: self.stats['get_items'] += (time.time() - start_t) @@ -1067,14 +1170,15 @@ """Remove a node from the database. Called exclusively by the destroy() method on Class. 
""" - logging.getLogger('hyperdb').info('destroynode %s%s'%(classname, nodeid)) + logging.getLogger('roundup.hyperdb').info('destroynode %s%s'%( + classname, nodeid)) # make sure the node exists if not self.hasnode(classname, nodeid): - raise IndexError, '%s has no node %s'%(classname, nodeid) + raise IndexError('%s has no node %s'%(classname, nodeid)) # see if we have this node cached - if self.cache.has_key((classname, nodeid)): + if (classname, nodeid) in self.cache: del self.cache[(classname, nodeid)] # see if there's any obvious commit actions that we should get rid of @@ -1088,7 +1192,7 @@ # remove from multilnks cl = self.getclass(classname) - x, mls = self.determine_columns(cl.properties.items()) + x, mls = self.determine_columns(list(cl.properties.iteritems())) for col in mls: # get the link ids sql = 'delete from %s_%s where nodeid=%s'%(classname, col, self.arg) @@ -1109,7 +1213,7 @@ return 0 # If this node is in the cache, then we do not need to go to # the database. (We don't consider this an LRU hit, though.) - if self.cache.has_key((classname, nodeid)): + if (classname, nodeid) in self.cache: # Return 1, not True, to match the type of the result of # the SQL operation below. 
return 1 @@ -1189,7 +1293,7 @@ """Convert the journal params values into safely repr'able and eval'able values.""" properties = self.getclass(classname).getprops() - for param, value in params.items(): + for param, value in params.iteritems(): if not value: continue property = properties[param] @@ -1208,7 +1312,7 @@ """ # make sure the node exists if not self.hasnode(classname, nodeid): - raise IndexError, '%s has no node %s'%(classname, nodeid) + raise IndexError('%s has no node %s'%(classname, nodeid)) cols = ','.join('nodeid date tag action params'.split()) journal = self.load_journal(classname, cols, nodeid) @@ -1220,7 +1324,7 @@ for nodeid, date_stamp, user, action, params in journal: params = eval(params) if isinstance(params, type({})): - for param, value in params.items(): + for param, value in params.iteritems(): if not value: continue property = properties.get(param, None) @@ -1229,7 +1333,7 @@ continue cvt = self.to_hyperdb_value(property.__class__) if isinstance(property, Password): - params[param] = cvt(value) + params[param] = password.JournalPassword(value) elif isinstance(property, Date): params[param] = cvt(value) elif isinstance(property, Interval): @@ -1267,7 +1371,7 @@ date_stamp = self.to_sql_value(Date)(pack_before) # do the delete - for classname in self.classes.keys(): + for classname in self.classes: sql = "delete from %s__journal where date<%s and "\ "action<>'create'"%(classname, self.arg) self.sql(sql, (date_stamp,)) @@ -1275,7 +1379,7 @@ def sql_commit(self, fail_ok=False): """ Actually commit to the database. 
""" - logging.getLogger('hyperdb').info('commit') + logging.getLogger('roundup.hyperdb').info('commit') self.conn.commit() @@ -1307,6 +1411,11 @@ # clear out the transactions self.transactions = [] + # clear the cache: Don't carry over cached values from one + # transaction to the next (there may be other changes from other + # transactions) + self.clearCache() + def sql_rollback(self): self.conn.rollback() @@ -1316,7 +1425,7 @@ Undo all the changes made since the database was opened or the last commit() or rollback() was performed. """ - logging.getLogger('hyperdb').info('rollback') + logging.getLogger('roundup.hyperdb').info('rollback') self.sql_rollback() @@ -1331,7 +1440,7 @@ self.clearCache() def sql_close(self): - logging.getLogger('hyperdb').info('close') + logging.getLogger('roundup.hyperdb').info('close') self.conn.close() def close(self): @@ -1354,7 +1463,7 @@ """ A dumpable version of the schema that we can store in the database """ - return (self.key, [(x, repr(y)) for x,y in self.properties.items()]) + return (self.key, [(x, repr(y)) for x,y in self.properties.iteritems()]) def enableJournalling(self): """Turn journalling on for this class @@ -1392,51 +1501,52 @@ def create_inner(self, **propvalues): """ Called by create, in-between the audit and react calls. 
""" - if propvalues.has_key('id'): - raise KeyError, '"id" is reserved' + if 'id' in propvalues: + raise KeyError('"id" is reserved') if self.db.journaltag is None: - raise DatabaseError, _('Database open read-only') + raise DatabaseError(_('Database open read-only')) - if propvalues.has_key('creator') or propvalues.has_key('actor') or \ - propvalues.has_key('creation') or propvalues.has_key('activity'): - raise KeyError, '"creator", "actor", "creation" and '\ - '"activity" are reserved' + if ('creator' in propvalues or 'actor' in propvalues or + 'creation' in propvalues or 'activity' in propvalues): + raise KeyError('"creator", "actor", "creation" and ' + '"activity" are reserved') # new node's id newid = self.db.newid(self.classname) # validate propvalues num_re = re.compile('^\d+$') - for key, value in propvalues.items(): + for key, value in propvalues.iteritems(): if key == self.key: try: self.lookup(value) except KeyError: pass else: - raise ValueError, 'node with key "%s" exists'%value + raise ValueError('node with key "%s" exists'%value) # try to handle this property try: prop = self.properties[key] except KeyError: - raise KeyError, '"%s" has no property "%s"'%(self.classname, - key) + raise KeyError('"%s" has no property "%s"'%(self.classname, + key)) if value is not None and isinstance(prop, Link): if type(value) != type(''): - raise ValueError, 'link value must be String' + raise ValueError('link value must be String') link_class = self.properties[key].classname # if it isn't a number, it's a key if not num_re.match(value): try: value = self.db.classes[link_class].lookup(value) except (TypeError, KeyError): - raise IndexError, 'new property "%s": %s not a %s'%( - key, value, link_class) + raise IndexError('new property "%s": %s not a %s'%( + key, value, link_class)) elif not self.db.getclass(link_class).hasnode(value): - raise IndexError, '%s has no node %s'%(link_class, value) + raise IndexError('%s has no node %s'%(link_class, + value)) # save off the 
value propvalues[key] = value @@ -1450,22 +1560,21 @@ if value is None: value = [] if not hasattr(value, '__iter__'): - raise TypeError, 'new property "%s" not an iterable of ids'%key - + raise TypeError('new property "%s" not an iterable of ids'%key) # clean up and validate the list of links link_class = self.properties[key].classname l = [] for entry in value: if type(entry) != type(''): - raise ValueError, '"%s" multilink value (%r) '\ - 'must contain Strings'%(key, value) + raise ValueError('"%s" multilink value (%r) ' + 'must contain Strings'%(key, value)) # if it isn't a number, it's a key if not num_re.match(entry): try: entry = self.db.classes[link_class].lookup(entry) except (TypeError, KeyError): - raise IndexError, 'new property "%s": %s not a %s'%( - key, entry, self.properties[key].classname) + raise IndexError('new property "%s": %s not a %s'%( + key, entry, self.properties[key].classname)) l.append(entry) value = l propvalues[key] = value @@ -1473,8 +1582,8 @@ # handle additions for nodeid in value: if not self.db.getclass(link_class).hasnode(nodeid): - raise IndexError, '%s has no node %s'%(link_class, - nodeid) + raise IndexError('%s has no node %s'%(link_class, + nodeid)) # register the link with the newly linked node if self.do_journal and self.properties[key].do_journal: self.db.addjournal(link_class, nodeid, 'link', @@ -1482,41 +1591,41 @@ elif isinstance(prop, String): if type(value) != type('') and type(value) != type(u''): - raise TypeError, 'new property "%s" not a string'%key + raise TypeError('new property "%s" not a string'%key) if prop.indexme: self.db.indexer.add_text((self.classname, newid, key), value) elif isinstance(prop, Password): if not isinstance(value, password.Password): - raise TypeError, 'new property "%s" not a Password'%key + raise TypeError('new property "%s" not a Password'%key) elif isinstance(prop, Date): if value is not None and not isinstance(value, date.Date): - raise TypeError, 'new property "%s" not a Date'%key + 
raise TypeError('new property "%s" not a Date'%key) elif isinstance(prop, Interval): if value is not None and not isinstance(value, date.Interval): - raise TypeError, 'new property "%s" not an Interval'%key + raise TypeError('new property "%s" not an Interval'%key) elif value is not None and isinstance(prop, Number): try: float(value) except ValueError: - raise TypeError, 'new property "%s" not numeric'%key + raise TypeError('new property "%s" not numeric'%key) elif value is not None and isinstance(prop, Boolean): try: int(value) except ValueError: - raise TypeError, 'new property "%s" not boolean'%key + raise TypeError('new property "%s" not boolean'%key) # make sure there's data where there needs to be - for key, prop in self.properties.items(): - if propvalues.has_key(key): + for key, prop in self.properties.iteritems(): + if key in propvalues: continue if key == self.key: - raise ValueError, 'key property "%s" is required'%key + raise ValueError('key property "%s" is required'%key) if isinstance(prop, Multilink): propvalues[key] = [] else: @@ -1543,35 +1652,34 @@ return nodeid # get the node's dict - d = self.db.getnode(self.classname, nodeid) + d = self.db.getnode(self.classname, nodeid, fetch_multilinks=False) + # handle common case -- that property is in dict -- first + # if None and one of creator/creation actor/activity return None + if propname in d: + r = d [propname] + # return copy of our list + if isinstance (r, list): + return r[:] + if r is not None: + return r + elif propname in ('creation', 'activity', 'creator', 'actor'): + return r + + # propname not in d: + if propname == 'creation' or propname == 'activity': + return date.Date() + if propname == 'creator' or propname == 'actor': + return self.db.getuid() - if propname == 'creation': - if d.has_key('creation'): - return d['creation'] - else: - return date.Date() - if propname == 'activity': - if d.has_key('activity'): - return d['activity'] - else: - return date.Date() - if propname == 
'creator': - if d.has_key('creator'): - return d['creator'] - else: - return self.db.getuid() - if propname == 'actor': - if d.has_key('actor'): - return d['actor'] - else: - return self.db.getuid() - - # get the property (raises KeyErorr if invalid) + # get the property (raises KeyError if invalid) prop = self.properties[propname] - # XXX may it be that propname is valid property name - # (above error is not raised) and not d.has_key(propname)??? - if (not d.has_key(propname)) or (d[propname] is None): + # lazy evaluation of Multilink + if propname not in d and isinstance(prop, Multilink): + self.db._materialize_multilink(self.classname, nodeid, d, propname) + + # handle there being no value in the table for the property + if propname not in d or d[propname] is None: if default is _marker: if isinstance(prop, Multilink): return [] @@ -1616,20 +1724,20 @@ if not propvalues: return propvalues - if propvalues.has_key('creation') or propvalues.has_key('creator') or \ - propvalues.has_key('actor') or propvalues.has_key('activity'): - raise KeyError, '"creation", "creator", "actor" and '\ - '"activity" are reserved' + if ('creator' in propvalues or 'actor' in propvalues or + 'creation' in propvalues or 'activity' in propvalues): + raise KeyError('"creator", "actor", "creation" and ' + '"activity" are reserved') - if propvalues.has_key('id'): - raise KeyError, '"id" is reserved' + if 'id' in propvalues: + raise KeyError('"id" is reserved') if self.db.journaltag is None: - raise DatabaseError, _('Database open read-only') + raise DatabaseError(_('Database open read-only')) node = self.db.getnode(self.classname, nodeid) if self.is_retired(nodeid): - raise IndexError, 'Requested item is retired' + raise IndexError('Requested item is retired') num_re = re.compile('^\d+$') # make a copy of the values dictionary - we'll modify the contents @@ -1642,7 +1750,7 @@ # for the Database layer to do its stuff multilink_changes = {} - for propname, value in propvalues.items(): + for 
propname, value in list(propvalues.items()): # check to make sure we're not duplicating an existing key if propname == self.key and node[propname] != value: try: @@ -1650,7 +1758,7 @@ except KeyError: pass else: - raise ValueError, 'node with key "%s" exists'%value + raise ValueError('node with key "%s" exists'%value) # this will raise the KeyError if the property isn't valid # ... we don't use getprops() here because we only care about @@ -1658,8 +1766,8 @@ try: prop = self.properties[propname] except KeyError: - raise KeyError, '"%s" has no property named "%s"'%( - self.classname, propname) + raise KeyError('"%s" has no property named "%s"'%( + self.classname, propname)) # if the value's the same as the existing value, no sense in # doing anything @@ -1674,18 +1782,19 @@ link_class = prop.classname # if it isn't a number, it's a key if value is not None and not isinstance(value, type('')): - raise ValueError, 'property "%s" link value be a string'%( - propname) + raise ValueError('property "%s" link value be a string'%( + propname)) if isinstance(value, type('')) and not num_re.match(value): try: value = self.db.classes[link_class].lookup(value) except (TypeError, KeyError): - raise IndexError, 'new property "%s": %s not a %s'%( - propname, value, prop.classname) + raise IndexError('new property "%s": %s not a %s'%( + propname, value, prop.classname)) if (value is not None and not self.db.getclass(link_class).hasnode(value)): - raise IndexError, '%s has no node %s'%(link_class, value) + raise IndexError('%s has no node %s'%(link_class, + value)) if self.do_journal and prop.do_journal: # register the unlink with the old linked node @@ -1702,22 +1811,22 @@ if value is None: value = [] if not hasattr(value, '__iter__'): - raise TypeError, 'new property "%s" not an iterable of'\ - ' ids'%propname + raise TypeError('new property "%s" not an iterable of' + ' ids'%propname) link_class = self.properties[propname].classname l = [] for entry in value: # if it isn't a 
number, it's a key if type(entry) != type(''): - raise ValueError, 'new property "%s" link value ' \ - 'must be a string'%propname + raise ValueError('new property "%s" link value ' + 'must be a string'%propname) if not num_re.match(entry): try: entry = self.db.classes[link_class].lookup(entry) except (TypeError, KeyError): - raise IndexError, 'new property "%s": %s not a %s'%( + raise IndexError('new property "%s": %s not a %s'%( propname, entry, - self.properties[propname].classname) + self.properties[propname].classname)) l.append(entry) value = l propvalues[propname] = value @@ -1727,7 +1836,7 @@ remove = [] # handle removals - if node.has_key(propname): + if propname in node: l = node[propname] else: l = [] @@ -1753,7 +1862,8 @@ # result in a SQL query, it is more efficient to # avoid the check if possible. if not self.db.getclass(link_class).hasnode(id): - raise IndexError, '%s has no node %s'%(link_class, id) + raise IndexError('%s has no node %s'%(link_class, + id)) # register the link with the newly linked node if self.do_journal and self.properties[propname].do_journal: self.db.addjournal(link_class, id, 'link', @@ -1773,7 +1883,7 @@ elif isinstance(prop, String): if value is not None and type(value) != type('') and type(value) != type(u''): - raise TypeError, 'new property "%s" not a string'%propname + raise TypeError('new property "%s" not a string'%propname) if prop.indexme: if value is None: value = '' self.db.indexer.add_text((self.classname, nodeid, propname), @@ -1781,31 +1891,33 @@ elif isinstance(prop, Password): if not isinstance(value, password.Password): - raise TypeError, 'new property "%s" not a Password'%propname + raise TypeError('new property "%s" not a Password'%propname) propvalues[propname] = value + journalvalues[propname] = \ + current and password.JournalPassword(current) elif value is not None and isinstance(prop, Date): if not isinstance(value, date.Date): - raise TypeError, 'new property "%s" not a Date'% propname + raise 
TypeError('new property "%s" not a Date'% propname) propvalues[propname] = value elif value is not None and isinstance(prop, Interval): if not isinstance(value, date.Interval): - raise TypeError, 'new property "%s" not an '\ - 'Interval'%propname + raise TypeError('new property "%s" not an ' + 'Interval'%propname) propvalues[propname] = value elif value is not None and isinstance(prop, Number): try: float(value) except ValueError: - raise TypeError, 'new property "%s" not numeric'%propname + raise TypeError('new property "%s" not numeric'%propname) elif value is not None and isinstance(prop, Boolean): try: int(value) except ValueError: - raise TypeError, 'new property "%s" not boolean'%propname + raise TypeError('new property "%s" not boolean'%propname) # nothing to do? if not propvalues: @@ -1838,7 +1950,7 @@ methods, and other nodes may reuse the values of their key properties. """ if self.db.journaltag is None: - raise DatabaseError, _('Database open read-only') + raise DatabaseError(_('Database open read-only')) self.fireAuditors('retire', nodeid, None) @@ -1858,7 +1970,7 @@ Make node available for all operations like it was before retirement. """ if self.db.journaltag is None: - raise DatabaseError, _('Database open read-only') + raise DatabaseError(_('Database open read-only')) node = self.db.getnode(self.classname, nodeid) # check if key property was overrided @@ -1868,8 +1980,8 @@ except KeyError: pass else: - raise KeyError, "Key property (%s) of retired node clashes with \ - existing one (%s)" % (key, node[key]) + raise KeyError("Key property (%s) of retired node clashes " + "with existing one (%s)" % (key, node[key])) self.fireAuditors('restore', nodeid, None) # use the arg for __retired__ to cope with any odd database type @@ -1911,26 +2023,9 @@ if there are any references to the node. 
""" if self.db.journaltag is None: - raise DatabaseError, _('Database open read-only') + raise DatabaseError(_('Database open read-only')) self.db.destroynode(self.classname, nodeid) - def history(self, nodeid): - """Retrieve the journal of edits on a particular node. - - 'nodeid' must be the id of an existing node of this class or an - IndexError is raised. - - The returned list contains tuples of the form - - (nodeid, date, tag, action, params) - - 'date' is a Timestamp object specifying the time of the change and - 'tag' is the journaltag specified when the database was opened. - """ - if not self.do_journal: - raise ValueError, 'Journalling is disabled for this class' - return self.db.getjournal(self.classname, nodeid) - # Locating nodes: def hasnode(self, nodeid): """Determine if the given nodeid actually exists @@ -1946,7 +2041,7 @@ """ prop = self.getprops()[propname] if not isinstance(prop, String): - raise TypeError, 'key properties must be String' + raise TypeError('key properties must be String') self.key = propname def getkey(self): @@ -1962,7 +2057,7 @@ otherwise a KeyError is raised. 
""" if not self.key: - raise TypeError, 'No key property set for class %s'%self.classname + raise TypeError('No key property set for class %s'%self.classname) # use the arg to handle any odd database type conversion (hello, # sqlite) @@ -1973,8 +2068,8 @@ # see if there was a result that's not retired row = self.db.sql_fetchone() if not row: - raise KeyError, 'No key (%s) value "%s" for "%s"'%(self.key, - keyvalue, self.classname) + raise KeyError('No key (%s) value "%s" for "%s"'%(self.key, + keyvalue, self.classname)) # return the id # XXX numeric ids @@ -2001,30 +2096,29 @@ # validate the args props = self.getprops() - propspec = propspec.items() - for propname, nodeids in propspec: + for propname, nodeids in propspec.iteritems(): # check the prop is OK prop = props[propname] if not isinstance(prop, Link) and not isinstance(prop, Multilink): - raise TypeError, "'%s' not a Link/Multilink property"%propname + raise TypeError("'%s' not a Link/Multilink property"%propname) # first, links a = self.db.arg allvalues = () sql = [] where = [] - for prop, values in propspec: + for prop, values in propspec.iteritems(): if not isinstance(props[prop], hyperdb.Link): continue if type(values) is type({}) and len(values) == 1: - values = values.keys()[0] + values = list(values)[0] if type(values) is type(''): allvalues += (values,) where.append('_%s = %s'%(prop, a)) elif values is None: where.append('_%s is NULL'%prop) else: - values = values.keys() + values = list(values) s = '' if None in values: values.remove(None) @@ -2038,7 +2132,7 @@ and %s"""%(self.classname, a, ' and '.join(where))) # now multilinks - for prop, values in propspec: + for prop, values in propspec.iteritems(): if not isinstance(props[prop], hyperdb.Multilink): continue if not values: @@ -2048,7 +2142,7 @@ allvalues += (values,) s = a else: - allvalues += tuple(values.keys()) + allvalues += tuple(values) s = ','.join([a]*len(values)) tn = '%s_%s'%(self.classname, prop) sql.append("""select id from _%s, %s 
where __retired__=%s @@ -2073,10 +2167,10 @@ """ where = [] args = [] - for propname in requirements.keys(): + for propname in requirements: prop = self.properties[propname] if not isinstance(prop, String): - raise TypeError, "'%s' not a String property"%propname + raise TypeError("'%s' not a String property"%propname) where.append(propname) args.append(requirements[propname].lower()) @@ -2135,32 +2229,106 @@ # The format parameter is replaced with the attribute. order_by_null_values = None - def filter(self, search_matches, filterspec, sort=[], group=[]): - """Return a list of the ids of the active nodes in this class that - match the 'filter' spec, sorted by the group spec and then the - sort spec - - "filterspec" is {propname: value(s)} - - "sort" and "group" are [(dir, prop), ...] where dir is '+', '-' - or None and prop is a prop name or None. Note that for - backward-compatibility reasons a single (dir, prop) tuple is - also allowed. - - "search_matches" is a container type or None + def supports_subselects(self): + '''Assuming DBs can do subselects, overwrite if they cannot. + ''' + return True + + def _filter_multilink_expression_fallback( + self, classname, multilink_table, expr): + '''This is a fallback for database that do not support + subselects.''' + + is_valid = expr.evaluate + + last_id, kws = None, [] + + ids = IdListOptimizer() + append = ids.append + + # This join and the evaluation in program space + # can be expensive for larger databases! + # TODO: Find a faster way to collect the data needed + # to evalute the expression. + # Moving the expression evaluation into the database + # would be nice but this tricky: Think about the cases + # where the multilink table does not have join values + # needed in evaluation. 
+ + stmnt = "SELECT c.id, m.linkid FROM _%s c " \ + "LEFT OUTER JOIN %s m " \ + "ON c.id = m.nodeid ORDER BY c.id" % ( + classname, multilink_table) + self.db.sql(stmnt) + + # collect all multilink items for a class item + for nid, kw in self.db.sql_fetchiter(): + if nid != last_id: + if last_id is None: + last_id = nid + else: + # we have all multilink items -> evaluate! + if is_valid(kws): append(last_id) + last_id, kws = nid, [] + if kw is not None: + kws.append(kw) + + if last_id is not None and is_valid(kws): + append(last_id) + + # we have ids of the classname table + return ids.where("_%s.id" % classname, self.db.arg) + + def _filter_multilink_expression(self, classname, multilink_table, v): + """ Filters out elements of the classname table that do not + match the given expression. + Returns tuple of 'WHERE' introns for the overall filter. + """ + try: + opcodes = [int(x) for x in v] + if min(opcodes) >= -1: raise ValueError() - The filter must match all properties specificed. If the property - value to match is a list: + expr = compile_expression(opcodes) - 1. String properties must match all elements in the list, and - 2. Other properties must match any of the elements in the list. + if not self.supports_subselects(): + # We heavily rely on subselects. If there is + # no decent support fall back to slower variant. 
+ return self._filter_multilink_expression_fallback( + classname, multilink_table, expr) + + atom = \ + "%s IN(SELECT linkid FROM %s WHERE nodeid=a.id)" % ( + self.db.arg, + multilink_table) + + intron = \ + "_%(classname)s.id in (SELECT id " \ + "FROM _%(classname)s AS a WHERE %(condition)s) " % { + 'classname' : classname, + 'condition' : expr.generate(lambda n: atom) } + + values = [] + def collect_values(n): values.append(n.x) + expr.visit(collect_values) + + return intron, values + except: + # original behavior + where = "%s.linkid in (%s)" % ( + multilink_table, ','.join([self.db.arg] * len(v))) + return where, v, True # True to indicate original + + def _filter_sql (self, search_matches, filterspec, srt=[], grp=[], retr=0): + """ Compute the proptree and the SQL/ARGS for a filter. + For argument description see filter below. + We return a 3-tuple, the proptree, the sql and the sql-args + or None if no SQL is necessary. + The flag retr serves to retrieve *all* non-Multilink properties + (for filling the cache during a filter_iter) """ # we can't match anything if search_matches is empty if not search_matches and search_matches is not None: - return [] - - if __debug__: - start_t = time.time() + return None icn = self.classname @@ -2173,8 +2341,8 @@ # figure the WHERE clause from the filterspec mlfilt = 0 # are we joining with Multilink tables? 
- sortattr = self._sortattr (group = group, sort = sort) - proptree = self._proptree(filterspec, sortattr) + sortattr = self._sortattr (group = grp, sort = srt) + proptree = self._proptree(filterspec, sortattr, retr) mlseen = 0 for pt in reversed(proptree.sortattr): p = pt @@ -2189,12 +2357,11 @@ pt.attr_sort_done = pt.tree_sort_done = True proptree.compute_sort_done() - ordercols = [] - auxcols = {} + cols = ['_%s.id'%icn] mlsort = [] rhsnum = 0 for p in proptree: - oc = None + rc = ac = oc = None cn = p.classname ln = p.uniqname pln = p.parent.uniqname @@ -2202,10 +2369,13 @@ k = p.name v = p.val propclass = p.propclass - if p.sort_type > 0: - oc = ac = '_%s._%s'%(pln, k) + if p.parent == proptree and p.name == 'id' \ + and 'retrieve' in p.need_for: + p.sql_idx = 0 + if 'sort' in p.need_for or 'retrieve' in p.need_for: + rc = oc = ac = '_%s._%s'%(pln, k) if isinstance(propclass, Multilink): - if p.sort_type < 2: + if 'search' in p.need_for: mlfilt = 1 tn = '%s_%s'%(pcn, k) if v in ('-1', ['-1'], []): @@ -2214,33 +2384,47 @@ where.append(self._subselect(pcn, tn)) else: frum.append(tn) - where.append('_%s.id=%s.nodeid'%(pln,tn)) + gen_join = True + + if p.has_values and isinstance(v, type([])): + result = self._filter_multilink_expression(pln, tn, v) + # XXX: We dont need an id join if we used the filter + gen_join = len(result) == 3 + + if gen_join: + where.append('_%s.id=%s.nodeid'%(pln,tn)) + if p.children: frum.append('_%s as _%s' % (cn, ln)) where.append('%s.linkid=_%s.id'%(tn, ln)) + if p.has_values: if isinstance(v, type([])): - s = ','.join([a for x in v]) - where.append('%s.linkid in (%s)'%(tn, s)) - args = args + v + where.append(result[0]) + args += result[1] else: where.append('%s.linkid=%s'%(tn, a)) args.append(v) - if p.sort_type > 0: + if 'sort' in p.need_for: assert not p.attr_sort_done and not p.sort_ids_needed elif k == 'id': - if p.sort_type < 2: + if 'search' in p.need_for: if isinstance(v, type([])): + # If there are no permitted values, then 
the + # where clause will always be false, and we + # can optimize the query away. + if not v: + return [] s = ','.join([a for x in v]) where.append('_%s.%s in (%s)'%(pln, k, s)) args = args + v else: where.append('_%s.%s=%s'%(pln, k, a)) args.append(v) - if p.sort_type > 0: - oc = ac = '_%s.id'%pln + if 'sort' in p.need_for or 'retrieve' in p.need_for: + rc = oc = ac = '_%s.id'%pln elif isinstance(propclass, String): - if p.sort_type < 2: + if 'search' in p.need_for: if not isinstance(v, type([])): v = [v] @@ -2254,12 +2438,12 @@ +' and '.join(["_%s._%s LIKE '%s'"%(pln, k, s) for s in v]) +')') # note: args are embedded in the query string now - if p.sort_type > 0: + if 'sort' in p.need_for: oc = ac = 'lower(_%s._%s)'%(pln, k) elif isinstance(propclass, Link): - if p.sort_type < 2: + if 'search' in p.need_for: if p.children: - if p.sort_type == 0: + if 'sort' not in p.need_for: frum.append('_%s as _%s' % (cn, ln)) where.append('_%s._%s=_%s.id'%(pln, k, ln)) if p.has_values: @@ -2270,11 +2454,11 @@ entry = None d[entry] = entry l = [] - if d.has_key(None) or not d: - if d.has_key(None): del d[None] + if None in d or not d: + if None in d: del d[None] l.append('_%s._%s is NULL'%(pln, k)) if d: - v = d.keys() + v = list(d) s = ','.join([a for x in v]) l.append('(_%s._%s in (%s))'%(pln, k, s)) args = args + v @@ -2287,16 +2471,18 @@ else: where.append('_%s._%s=%s'%(pln, k, a)) args.append(v) - if p.sort_type > 0: + if 'sort' in p.need_for: lp = p.cls.labelprop() oc = ac = '_%s._%s'%(pln, k) if lp != 'id': - if p.tree_sort_done and p.sort_type > 0: + if p.tree_sort_done: loj.append( 'LEFT OUTER JOIN _%s as _%s on _%s._%s=_%s.id'%( cn, ln, pln, k, ln)) oc = '_%s._%s'%(ln, lp) - elif isinstance(propclass, Date) and p.sort_type < 2: + if 'retrieve' in p.need_for: + rc = '_%s._%s'%(pln, k) + elif isinstance(propclass, Date) and 'search' in p.need_for: dc = self.db.to_sql_value(hyperdb.Date) if isinstance(v, type([])): s = ','.join([a for x in v]) @@ -2317,7 +2503,7 @@ pass 
elif isinstance(propclass, Interval): # filter/sort using the ___int__ column - if p.sort_type < 2: + if 'search' in p.need_for: if isinstance(v, type([])): s = ','.join([a for x in v]) where.append('_%s.__%s_int__ in (%s)'%(pln, k, s)) @@ -2335,9 +2521,29 @@ except ValueError: # If range creation fails - ignore search parameter pass - if p.sort_type > 0: + if 'sort' in p.need_for: oc = ac = '_%s.__%s_int__'%(pln,k) - elif p.sort_type < 2: + if 'retrieve' in p.need_for: + rc = '_%s._%s'%(pln,k) + elif isinstance(propclass, Boolean) and 'search' in p.need_for: + if type(v) == type(""): + v = v.split(',') + if type(v) != type([]): + v = [v] + bv = [] + for val in v: + if type(val) is type(''): + bv.append(propclass.from_raw (val)) + else: + bv.append(bool(val)) + if len(bv) == 1: + where.append('_%s._%s=%s'%(pln, k, a)) + args = args + bv + else: + s = ','.join([a for x in v]) + where.append('_%s._%s in (%s)'%(pln, k, s)) + args = args + bv + elif 'search' in p.need_for: if isinstance(v, type([])): s = ','.join([a for x in v]) where.append('_%s._%s in (%s)'%(pln, k, s)) @@ -2347,18 +2553,28 @@ args.append(v) if oc: if p.sort_ids_needed: - auxcols[ac] = p + if rc == ac: + p.sql_idx = len(cols) + p.auxcol = len(cols) + cols.append(ac) if p.tree_sort_done and p.sort_direction: - # Don't select top-level id twice - if p.name != 'id' or p.parent != proptree: - ordercols.append(oc) + # Don't select top-level id or multilink twice + if (not p.sort_ids_needed or ac != oc) and (p.name != 'id' + or p.parent != proptree): + if rc == oc: + p.sql_idx = len(cols) + cols.append(oc) desc = ['', ' desc'][p.sort_direction == '-'] # Some SQL dbs sort NULL values last -- we want them first. 
if (self.order_by_null_values and p.name != 'id'): nv = self.order_by_null_values % oc - ordercols.append(nv) + cols.append(nv) p.orderby.append(nv + desc) p.orderby.append(oc + desc) + if 'retrieve' in p.need_for and p.sql_idx is None: + assert(rc) + p.sql_idx = len(cols) + cols.append (rc) props = self.getprops() @@ -2381,11 +2597,8 @@ if mlfilt: # we're joining tables on the id, so we will get dupes if we # don't distinct() - cols = ['distinct(_%s.id)'%icn] - else: - cols = ['_%s.id'%icn] - if ordercols: - cols = cols + ordercols + cols[0] = 'distinct(_%s.id)'%icn + order = [] # keep correct sequence of order attributes. for sa in proptree.sortattr: @@ -2396,21 +2609,50 @@ order = ' order by %s'%(','.join(order)) else: order = '' - for o, p in auxcols.iteritems (): - cols.append (o) - p.auxcol = len (cols) - 1 cols = ','.join(cols) loj = ' '.join(loj) sql = 'select %s from %s %s %s%s'%(cols, frum, loj, where, order) args = tuple(args) __traceback_info__ = (sql, args) + return proptree, sql, args + + def filter(self, search_matches, filterspec, sort=[], group=[]): + """Return a list of the ids of the active nodes in this class that + match the 'filter' spec, sorted by the group spec and then the + sort spec + + "filterspec" is {propname: value(s)} + + "sort" and "group" are [(dir, prop), ...] where dir is '+', '-' + or None and prop is a prop name or None. Note that for + backward-compatibility reasons a single (dir, prop) tuple is + also allowed. + + "search_matches" is a container type or None + + The filter must match all properties specificed. If the property + value to match is a list: + + 1. String properties must match all elements in the list, and + 2. Other properties must match any of the elements in the list. + """ + if __debug__: + start_t = time.time() + + sq = self._filter_sql (search_matches, filterspec, sort, group) + # nothing to match? 
+ if sq is None: + return [] + proptree, sql, args = sq + self.db.sql(sql, args) l = self.db.sql_fetchall() # Compute values needed for sorting in proptree.sort - for p in auxcols.itervalues(): - p.sort_ids = p.sort_result = [row[p.auxcol] for row in l] + for p in proptree: + if hasattr(p, 'auxcol'): + p.sort_ids = p.sort_result = [row[p.auxcol] for row in l] # return the IDs (the first column) # XXX numeric ids l = [str(row[0]) for row in l] @@ -2420,6 +2662,53 @@ self.db.stats['filtering'] += (time.time() - start_t) return l + def filter_iter(self, search_matches, filterspec, sort=[], group=[]): + """Iterator similar to filter above with same args. + Limitation: We don't sort on multilinks. + This uses an optimisation: We put all nodes that are in the + current row into the node cache. Then we return the node id. + That way a fetch of a node won't create another sql-fetch (with + a join) from the database because the nodes are already in the + cache. We're using our own temporary cursor. + """ + sq = self._filter_sql(search_matches, filterspec, sort, group, retr=1) + # nothing to match? + if sq is None: + return + proptree, sql, args = sq + cursor = self.db.conn.cursor() + self.db.sql(sql, args, cursor) + classes = {} + for p in proptree: + if 'retrieve' in p.need_for: + cn = p.parent.classname + ptid = p.parent.id # not the nodeid! 
+ key = (cn, ptid) + if key not in classes: + classes[key] = {} + name = p.name + assert (name) + classes[key][name] = p + p.to_hyperdb = self.db.to_hyperdb_value(p.propclass.__class__) + while True: + row = cursor.fetchone() + if not row: break + # populate cache with current items + for (classname, ptid), pt in classes.iteritems(): + nodeid = str(row[pt['id'].sql_idx]) + key = (classname, nodeid) + if key in self.db.cache: + self.db._cache_refresh(key) + continue + node = {} + for propname, p in pt.iteritems(): + value = row[p.sql_idx] + if value is not None: + value = p.to_hyperdb(value) + node[propname] = value + self.db._cache_save(key, node) + yield str(row[0]) + def filter_sql(self, sql): """Return a list of the ids of the items in this class that match the SQL provided. The SQL is a complete "select" statement. @@ -2471,16 +2760,16 @@ may collide with the names of existing properties, or a ValueError is raised before any properties have been added. """ - for key in properties.keys(): - if self.properties.has_key(key): - raise ValueError, key + for key in properties: + if key in self.properties: + raise ValueError(key) self.properties.update(properties) def index(self, nodeid): """Add (or refresh) the node to search indexes """ # find all the String properties that have indexme - for prop, propclass in self.getprops().items(): + for prop, propclass in self.getprops().iteritems(): if isinstance(propclass, String) and propclass.indexme: self.db.indexer.add_text((self.classname, nodeid, prop), str(self.get(nodeid, prop))) @@ -2519,7 +2808,7 @@ Return the nodeid of the node imported. 
""" if self.db.journaltag is None: - raise DatabaseError, _('Database open read-only') + raise DatabaseError(_('Database open read-only')) properties = self.getprops() # make the new node's property map @@ -2557,16 +2846,13 @@ elif isinstance(prop, hyperdb.Interval): value = date.Interval(value) elif isinstance(prop, hyperdb.Password): - pwd = password.Password() - pwd.unpack(value) - value = pwd + value = password.Password(encrypted=value) elif isinstance(prop, String): if isinstance(value, unicode): value = value.encode('utf8') if not isinstance(value, str): - raise TypeError, \ - 'new property "%(propname)s" not a string: %(value)r' \ - % locals() + raise TypeError('new property "%(propname)s" not a ' + 'string: %(value)r'%locals()) if prop.indexme: self.db.indexer.add_text((self.classname, newid, propname), value) @@ -2606,8 +2892,8 @@ date = date.get_tuple() if action == 'set': export_data = {} - for propname, value in params.items(): - if not properties.has_key(propname): + for propname, value in params.iteritems(): + if propname not in properties: # property no longer in the schema continue @@ -2627,41 +2913,9 @@ # old tracker with data stored in the create! params = {} l = [nodeid, date, user, action, params] - r.append(map(repr, l)) + r.append(list(map(repr, l))) return r - def import_journals(self, entries): - """Import a class's journal. 
- - Uses setjournal() to set the journal for each item.""" - properties = self.getprops() - d = {} - for l in entries: - l = map(eval, l) - nodeid, jdate, user, action, params = l - r = d.setdefault(nodeid, []) - if action == 'set': - for propname, value in params.items(): - prop = properties[propname] - if value is None: - pass - elif isinstance(prop, Date): - value = date.Date(value) - elif isinstance(prop, Interval): - value = date.Interval(value) - elif isinstance(prop, Password): - pwd = password.Password() - pwd.unpack(value) - value = pwd - params[propname] = value - elif action == 'create' and params: - # old tracker with data stored in the create! - params = {} - r.append((nodeid, date.Date(jdate), user, action, params)) - - for nodeid, l in d.items(): - self.db.setjournal(self.classname, nodeid, l) - class FileClass(hyperdb.FileClass, Class): """This class defines a large chunk of data. To support this, it has a mandatory String property "content" which is typically saved off @@ -2675,9 +2929,9 @@ """The newly-created class automatically includes the "content" and "type" properties. """ - if not properties.has_key('content'): + if 'content' not in properties: properties['content'] = hyperdb.String(indexme='yes') - if not properties.has_key('type'): + if 'type' not in properties: properties['type'] = hyperdb.String() Class.__init__(self, db, classname, **properties) @@ -2720,7 +2974,7 @@ if propname == 'content': try: return self.db.getfile(self.classname, nodeid, None) - except IOError, (strerror): + except IOError, strerror: # BUG: by catching this we donot see an error in the log. 
return 'ERROR reading file: %s%s\n%s\n%s'%( self.classname, nodeid, poss_msg, strerror) @@ -2737,7 +2991,7 @@ # now remove the content property so it's not stored in the db content = None - if propvalues.has_key('content'): + if 'content' in propvalues: content = propvalues['content'] del propvalues['content'] @@ -2764,7 +3018,7 @@ Use the content-type property for the content property. """ # find all the String properties that have indexme - for prop, propclass in self.getprops().items(): + for prop, propclass in self.getprops().iteritems(): if prop == 'content' and propclass.indexme: mime_type = self.get(nodeid, 'type', self.default_mime_type) self.db.indexer.add_text((self.classname, nodeid, 'content'), @@ -2788,17 +3042,17 @@ "creation", "creator", "activity" or "actor" property, a ValueError is raised. """ - if not properties.has_key('title'): + if 'title' not in properties: properties['title'] = hyperdb.String(indexme='yes') - if not properties.has_key('messages'): + if 'messages' not in properties: properties['messages'] = hyperdb.Multilink("msg") - if not properties.has_key('files'): + if 'files' not in properties: properties['files'] = hyperdb.Multilink("file") - if not properties.has_key('nosy'): + if 'nosy' not in properties: # note: journalling is turned off as it really just wastes # space. 
this behaviour may be overridden in an instance properties['nosy'] = hyperdb.Multilink("user", do_journal="no") - if not properties.has_key('superseder'): + if 'superseder' not in properties: properties['superseder'] = hyperdb.Multilink(classname) Class.__init__(self, db, classname, **properties) Modified: tracker/roundup-src/roundup/backends/sessions_dbm.py ============================================================================== --- tracker/roundup-src/roundup/backends/sessions_dbm.py (original) +++ tracker/roundup-src/roundup/backends/sessions_dbm.py Thu Aug 4 15:46:52 2011 @@ -7,9 +7,11 @@ """ __docformat__ = 'restructuredtext' -import anydbm, whichdb, os, marshal, time +import os, marshal, time + from roundup import hyperdb from roundup.i18n import _ +from roundup.anypy.dbm_ import anydbm, whichdb, key_in class BasicDatabase: ''' Provide a nice encapsulation of an anydbm store. @@ -26,7 +28,7 @@ def exists(self, infoid): db = self.opendb('c') try: - return db.has_key(infoid) + return key_in(db, infoid) finally: db.close() @@ -44,10 +46,10 @@ ''' db_type = '' if os.path.exists(path): - db_type = whichdb.whichdb(path) + db_type = whichdb(path) if not db_type: - raise hyperdb.DatabaseError, \ - _("Couldn't identify database type") + raise hyperdb.DatabaseError( + _("Couldn't identify database type")) elif os.path.exists(path+'.db'): # if the path ends in '.db', it's a dbm database, whether # anydbm says it's dbhash or not! 
@@ -58,12 +60,12 @@ def get(self, infoid, value, default=_marker): db = self.opendb('c') try: - if db.has_key(infoid): + if key_in(db, infoid): values = marshal.loads(db[infoid]) else: if default != self._marker: return default - raise KeyError, 'No such %s "%s"'%(self.name, infoid) + raise KeyError('No such %s "%s"'%(self.name, infoid)) return values.get(value, None) finally: db.close() @@ -76,14 +78,14 @@ del d['__timestamp'] return d except KeyError: - raise KeyError, 'No such %s "%s"'%(self.name, infoid) + raise KeyError('No such %s "%s"'%(self.name, infoid)) finally: db.close() def set(self, infoid, **newvalues): db = self.opendb('c') try: - if db.has_key(infoid): + if key_in(db, infoid): values = marshal.loads(db[infoid]) else: values = {'__timestamp': time.time()} @@ -95,14 +97,14 @@ def list(self): db = self.opendb('r') try: - return db.keys() + return list(db.keys()) finally: db.close() def destroy(self, infoid): db = self.opendb('c') try: - if db.has_key(infoid): + if key_in(db, infoid): del db[infoid] finally: db.close() Modified: tracker/roundup-src/roundup/backends/sessions_rdbms.py ============================================================================== --- tracker/roundup-src/roundup/backends/sessions_rdbms.py (original) +++ tracker/roundup-src/roundup/backends/sessions_rdbms.py Thu Aug 4 15:46:52 2011 @@ -36,7 +36,7 @@ if not res: if default != self._marker: return default - raise KeyError, 'No such %s "%s"'%(self.name, infoid) + raise KeyError('No such %s "%s"'%(self.name, infoid)) values = eval(res[0]) return values.get(value, None) @@ -46,7 +46,7 @@ n, n, self.db.arg), (infoid,)) res = self.cursor.fetchone() if not res: - raise KeyError, 'No such %s "%s"'%(self.name, infoid) + raise KeyError('No such %s "%s"'%(self.name, infoid)) return eval(res[0]) def set(self, infoid, **newvalues): @@ -72,6 +72,12 @@ args = (infoid, time.time(), repr(values)) c.execute(sql, args) + def list(self): + c = self.cursor + n = self.name + c.execute('select 
%s_key from %ss'%(n, n)) + return [res[0] for res in c.fetchall()] + def destroy(self, infoid): self.cursor.execute('delete from %ss where %s_key=%s'%(self.name, self.name, self.db.arg), (infoid,)) Added: tracker/roundup-src/roundup/cgi/KeywordsExpr.py ============================================================================== --- (empty file) +++ tracker/roundup-src/roundup/cgi/KeywordsExpr.py Thu Aug 4 15:46:52 2011 @@ -0,0 +1,273 @@ +# This module is free software, you may redistribute it +# and/or modify under the same terms as Python. + +WINDOW_CONTENT = '''\ +

Keyword Expression Editor:

+
+
+ +''' + +def list_nodes(request): + prop = request.form.getfirst("property") + cls = request.client.db.getclass(prop) + items = [] + for nodeid in cls.getnodeids(): + l = cls.getnode(nodeid).items() + l = dict([x for x in l if len(x) == 2]) + try: + items.append((l['id'], l['name'])) + except KeyError: + pass + items.sort(key=lambda x: int(x[0])) + return items + +def items_to_keywords(items): + return ',\n '.join(['["%s", "%s"]' % x for x in items]) + + +def render_keywords_expression_editor(request): + prop = request.form.getfirst("property") + + window_content = WINDOW_CONTENT % { + 'prop' : prop, + 'keywords': items_to_keywords(list_nodes(request)), + 'original': '' + } + + return window_content + +# vim: set et sts=4 sw=4 : Modified: tracker/roundup-src/roundup/cgi/accept_language.py ============================================================================== --- tracker/roundup-src/roundup/cgi/accept_language.py (original) +++ tracker/roundup-src/roundup/cgi/accept_language.py Thu Aug 4 15:46:52 2011 @@ -35,7 +35,7 @@ # both lre = re.compile(nqlre + "|" + qlre) -ascii = ''.join([chr(x) for x in xrange(256)]) +ascii = ''.join([chr(x) for x in range(256)]) whitespace = ' \t\n\r\v\f' def parse(language_header): Modified: tracker/roundup-src/roundup/cgi/actions.py ============================================================================== --- tracker/roundup-src/roundup/cgi/actions.py (original) +++ tracker/roundup-src/roundup/cgi/actions.py Thu Aug 4 15:46:52 2011 @@ -1,4 +1,4 @@ -import re, cgi, StringIO, urllib, time, random, csv, codecs +import re, cgi, time, random, csv, codecs from roundup import hyperdb, token, date, password from roundup.actions import Action as BaseAction @@ -6,6 +6,7 @@ import roundup.exceptions from roundup.cgi import exceptions, templating from roundup.mailgw import uidFromAddress +from roundup.anypy import io_, urllib_ __all__ = ['Action', 'ShowAction', 'RetireAction', 'SearchAction', 'EditCSVAction', 'EditItemAction', 
'PassResetAction', @@ -53,9 +54,9 @@ if (self.permissionType and not self.hasPermission(self.permissionType)): info = {'action': self.name, 'classname': self.classname} - raise exceptions.Unauthorised, self._( + raise exceptions.Unauthorised(self._( 'You do not have permission to ' - '%(action)s the %(classname)s class.')%info + '%(action)s the %(classname)s class.')%info) _marker = [] def hasPermission(self, permission, classname=_marker, itemid=None, property=None): @@ -79,23 +80,23 @@ def handle(self): """Show a node of a particular class/id.""" t = n = '' - for key in self.form.keys(): + for key in self.form: if self.typere.match(key): t = self.form[key].value.strip() elif self.numre.match(key): n = self.form[key].value.strip() if not t: - raise ValueError, self._('No type specified') + raise ValueError(self._('No type specified')) if not n: - raise exceptions.SeriousError, self._('No ID entered') + raise exceptions.SeriousError(self._('No ID entered')) try: int(n) except ValueError: d = {'input': n, 'classname': t} - raise exceptions.SeriousError, self._( - '"%(input)s" is not an ID (%(classname)s ID required)')%d + raise exceptions.SeriousError(self._( + '"%(input)s" is not an ID (%(classname)s ID required)')%d) url = '%s%s%s'%(self.base, t, n) - raise exceptions.Redirect, url + raise exceptions.Redirect(url) class RetireAction(Action): name = 'retire' @@ -116,15 +117,15 @@ # make sure we don't try to retire admin or anonymous if self.classname == 'user' and \ self.db.user.get(itemid, 'username') in ('admin', 'anonymous'): - raise ValueError, self._( - 'You may not retire the admin or anonymous user') + raise ValueError(self._( + 'You may not retire the admin or anonymous user')) # check permission if not self.hasPermission('Retire', classname=self.classname, itemid=itemid): - raise exceptions.Unauthorised, self._( + raise exceptions.Unauthorised(self._( 'You do not have permission to retire %(class)s' - ) % {'class': self.classname} + ) % {'class': 
self.classname}) # do the retire self.db.getclass(self.classname).retire(itemid) @@ -171,14 +172,14 @@ try: qid = self.db.query.lookup(old_queryname) if not self.hasPermission('Edit', 'query', itemid=qid): - raise exceptions.Unauthorised, self._( - "You do not have permission to edit queries") + raise exceptions.Unauthorised(self._( + "You do not have permission to edit queries")) self.db.query.set(qid, klass=self.classname, url=url) except KeyError: # create a query if not self.hasPermission('Create', 'query'): - raise exceptions.Unauthorised, self._( - "You do not have permission to store queries") + raise exceptions.Unauthorised(self._( + "You do not have permission to store queries")) qid = self.db.query.create(name=queryname, klass=self.classname, url=url) else: @@ -199,15 +200,15 @@ if old_queryname != self.db.query.get(qid, 'name'): continue if not self.hasPermission('Edit', 'query', itemid=qid): - raise exceptions.Unauthorised, self._( - "You do not have permission to edit queries") + raise exceptions.Unauthorised(self._( + "You do not have permission to edit queries")) self.db.query.set(qid, klass=self.classname, url=url, name=queryname) else: # create a query if not self.hasPermission('Create', 'query'): - raise exceptions.Unauthorised, self._( - "You do not have permission to store queries") + raise exceptions.Unauthorised(self._( + "You do not have permission to store queries")) qid = self.db.query.create(name=queryname, klass=self.classname, url=url, private_for=uid) @@ -223,7 +224,7 @@ def fakeFilterVars(self): """Add a faked :filter form variable for each filtering prop.""" cls = self.db.classes[self.classname] - for key in self.form.keys(): + for key in self.form: prop = cls.get_transitive_prop(key) if not prop: continue @@ -269,7 +270,7 @@ def getFromForm(self, name): for key in ('@' + name, ':' + name): - if self.form.has_key(key): + if key in self.form: return self.form[key].value.strip() return '' @@ -293,7 +294,7 @@ # figure the properties list 
for the class cl = self.db.classes[self.classname] - props_without_id = cl.getprops(protected=0).keys() + props_without_id = list(cl.getprops(protected=0)) # the incoming CSV data will always have the properties in colums # sorted and starting with the "id" column @@ -301,7 +302,7 @@ props = ['id'] + props_without_id # do the edit - rows = StringIO.StringIO(self.form['rows'].value) + rows = io_.BytesIO(self.form['rows'].value) reader = csv.reader(rows) found = {} line = 0 @@ -322,9 +323,14 @@ # check permission to create this item if not self.hasPermission('Create', classname=self.classname): - raise exceptions.Unauthorised, self._( + raise exceptions.Unauthorised(self._( 'You do not have permission to create %(class)s' - ) % {'class': self.classname} + ) % {'class': self.classname}) + elif cl.hasnode(itemid) and cl.is_retired(itemid): + # If a CSV line just mentions an id and the corresponding + # item is retired, then the item is restored. + cl.restore(itemid) + continue else: exists = 1 @@ -340,9 +346,9 @@ # check permission to edit this property on this item if exists and not self.hasPermission('Edit', itemid=itemid, classname=self.classname, property=name): - raise exceptions.Unauthorised, self._( + raise exceptions.Unauthorised(self._( 'You do not have permission to edit %(class)s' - ) % {'class': self.classname} + ) % {'class': self.classname}) prop = cl.properties[name] value = value.strip() @@ -352,7 +358,7 @@ if isinstance(prop, hyperdb.Multilink): value = value.split(':') elif isinstance(prop, hyperdb.Password): - value = password.Password(value) + value = password.Password(value, config=self.db.config) elif isinstance(prop, hyperdb.Interval): value = date.Interval(value) elif isinstance(prop, hyperdb.Date): @@ -379,13 +385,13 @@ # retire the removed entries for itemid in cl.list(): - if not found.has_key(itemid): + if itemid not in found: # check permission to retire this item if not self.hasPermission('Retire', itemid=itemid, classname=self.classname): 
- raise exceptions.Unauthorised, self._( + raise exceptions.Unauthorised(self._( 'You do not have permission to retire %(class)s' - ) % {'class': self.classname} + ) % {'class': self.classname}) cl.retire(itemid) # all OK @@ -405,12 +411,12 @@ links = {} for cn, nodeid, propname, vlist in all_links: numeric_id = int (nodeid or 0) - if not (numeric_id > 0 or all_props.has_key((cn, nodeid))): + if not (numeric_id > 0 or (cn, nodeid) in all_props): # link item to link to doesn't (and won't) exist continue for value in vlist: - if not all_props.has_key(value): + if value not in all_props: # link item to link to doesn't (and won't) exist continue deps.setdefault((cn, nodeid), []).append(value) @@ -422,19 +428,19 @@ # loop detection change = 0 while len(all_props) != len(done): - for needed in all_props.keys(): - if done.has_key(needed): + for needed in all_props: + if needed in done: continue tlist = deps.get(needed, []) for target in tlist: - if not done.has_key(target): + if target not in done: break else: done[needed] = 1 order.append(needed) change = 1 if not change: - raise ValueError, 'linking must not loop!' 
+ raise ValueError('linking must not loop!') # now, edit / create m = [] @@ -448,7 +454,7 @@ # and some nice feedback for the user if props: - info = ', '.join(map(self._, props.keys())) + info = ', '.join(map(self._, props)) m.append( self._('%(class)s %(id)s %(properties)s edited ok') % {'class':cn, 'id':nodeid, 'properties':info}) @@ -469,18 +475,18 @@ % {'class':cn, 'id':newid}) # fill in new ids in links - if links.has_key(needed): + if needed in links: for linkcn, linkid, linkprop in links[needed]: props = all_props[(linkcn, linkid)] cl = self.db.classes[linkcn] propdef = cl.getprops()[linkprop] - if not props.has_key(linkprop): + if linkprop not in props: if linkid is None or linkid.startswith('-'): # linking to a new item if isinstance(propdef, hyperdb.Multilink): - props[linkprop] = [newid] + props[linkprop] = [nodeid] else: - props[linkprop] = newid + props[linkprop] = nodeid else: # linking to an existing item if isinstance(propdef, hyperdb.Multilink): @@ -488,7 +494,7 @@ existing.append(nodeid) props[linkprop] = existing else: - props[linkprop] = newid + props[linkprop] = nodeid return '
'.join(m) @@ -496,9 +502,9 @@ """Change the node based on the contents of the form.""" # check for permission if not self.editItemPermission(props, classname=cn, itemid=nodeid): - raise exceptions.Unauthorised, self._( + raise exceptions.Unauthorised(self._( 'You do not have permission to edit %(class)s' - ) % {'class': cn} + ) % {'class': cn}) # make the changes cl = self.db.classes[cn] @@ -508,9 +514,9 @@ """Create a node based on the contents of the form.""" # check for permission if not self.newItemPermission(props, classname=cn): - raise exceptions.Unauthorised, self._( + raise exceptions.Unauthorised(self._( 'You do not have permission to create %(class)s' - ) % {'class': cn} + ) % {'class': cn}) # create the node and return its id cl = self.db.classes[cn] @@ -551,24 +557,19 @@ if not self.hasPermission('Create', classname=classname): return 0 - # Check Edit permission for each property, to avoid being able + # Check Create permission for each property, to avoid being able # to set restricted ones on new item creation for key in props: - if not self.hasPermission('Edit', classname=classname, + if not self.hasPermission('Create', classname=classname, property=key): - # We restrict by default and special-case allowed properties - if key == 'date' or key == 'content': - continue - elif key == 'author' and props[key] == self.userid: - continue return 0 return 1 class EditItemAction(EditCommon): def lastUserActivity(self): - if self.form.has_key(':lastactivity'): + if ':lastactivity' in self.form: d = date.Date(self.form[':lastactivity'].value) - elif self.form.has_key('@lastactivity'): + elif '@lastactivity' in self.form: d = date.Date(self.form['@lastactivity'].value) else: return None @@ -588,7 +589,7 @@ props, links = self.client.parsePropsFromForm() key = (self.classname, self.nodeid) # we really only collide for direct prop edit conflicts - return props[key].keys() + return list(props[key]) else: return [] @@ -638,12 +639,12 @@ # we will want to include 
index-page args in this URL too if self.nodeid is not None: url += self.nodeid - url += '?@ok_message=%s&@template=%s'%(urllib.quote(message), - urllib.quote(self.template)) + url += '?@ok_message=%s&@template=%s'%(urllib_.quote(message), + urllib_.quote(self.template)) if self.nodeid is None: req = templating.HTMLRequest(self.client) url += '&' + req.indexargs_url('', {})[1:] - raise exceptions.Redirect, url + raise exceptions.Redirect(url) class NewItemAction(EditCommon): def handle(self): @@ -678,9 +679,9 @@ self.db.commit() # redirect to the new item's page - raise exceptions.Redirect, '%s%s%s?@ok_message=%s&@template=%s' % ( - self.base, self.classname, self.nodeid, urllib.quote(messages), - urllib.quote(self.template)) + raise exceptions.Redirect('%s%s%s?@ok_message=%s&@template=%s' % ( + self.base, self.classname, self.nodeid, urllib_.quote(messages), + urllib_.quote(self.template))) class PassResetAction(Action): def handle(self): @@ -691,7 +692,7 @@ """ otks = self.db.getOTKManager() - if self.form.has_key('otk'): + if 'otk' in self.form: # pull the rego information out of the otk database otk = self.form['otk'].value uid = otks.get(otk, 'uid', default=None) @@ -715,7 +716,7 @@ # XXX we need to make the "default" page be able to display errors! 
try: # set the password - cl.set(uid, password=password.Password(newpw)) + cl.set(uid, password=password.Password(newpw, config=self.db.config)) # clear the props from the otk database otks.destroy(otk) self.db.commit() @@ -743,7 +744,7 @@ return # no OTK, so now figure the user - if self.form.has_key('username'): + if 'username' in self.form: name = self.form['username'].value try: uid = self.db.user.lookup(name) @@ -751,7 +752,7 @@ self.client.error_message.append(self._('Unknown username')) return address = self.db.user.get(uid, 'address') - elif self.form.has_key('address'): + elif 'address' in self.form: address = self.form['address'].value uid = uidFromAddress(self.db, ('', address), create=0) if not uid: @@ -802,7 +803,7 @@ # nice message message = self._('You are now registered, welcome!') url = '%suser%s?@ok_message=%s'%(self.base, self.userid, - urllib.quote(message)) + urllib_.quote(message)) # redirect to the user's page (but not 302, as some email clients seem # to want to reload the page, or something) @@ -845,12 +846,6 @@ % str(message)) return - # registration isn't allowed to supply roles - user_props = props[('user', None)] - if user_props.has_key('roles'): - raise exceptions.Unauthorised, self._( - "It is not permitted to supply roles at registration.") - # skip the confirmation step? 
if self.db.config['INSTANT_REGISTRATION']: # handle the create now @@ -875,7 +870,8 @@ return self.finishRego() # generate the one-time-key and store the props for later - for propname, proptype in self.db.user.getprops().items(): + user_props = props[('user', None)] + for propname, proptype in self.db.user.getprops().iteritems(): value = user_props.get(propname, None) if value is None: pass @@ -926,7 +922,18 @@ self.db.commit() # redirect to the "you're almost there" page - raise exceptions.Redirect, '%suser?@template=rego_progress'%self.base + raise exceptions.Redirect('%suser?@template=rego_progress'%self.base) + + def newItemPermission(self, props, classname=None): + """Just check the "Register" permission. + """ + # registration isn't allowed to supply roles + if 'roles' in props: + raise exceptions.Unauthorised(self._( + "It is not permitted to supply roles at registration.")) + + # technically already checked, but here for clarity + return self.hasPermission('Register', classname=classname) class LogoutAction(Action): def handle(self): @@ -956,13 +963,13 @@ raise roundup.exceptions.Reject(self._('Invalid request')) # we need the username at a minimum - if not self.form.has_key('__login_name'): + if '__login_name' not in self.form: self.client.error_message.append(self._('Username required')) return # get the login info self.client.user = self.form['__login_name'].value - if self.form.has_key('__login_password'): + if '__login_password' in self.form: password = self.form['__login_password'].value else: password = '' @@ -979,36 +986,43 @@ # save user in session self.client.session_api.set(user=self.client.user) - if self.form.has_key('remember'): + if 'remember' in self.form: self.client.session_api.update(set_cookie=True, expire=24*3600*365) # If we came from someplace, go back there - if self.form.has_key('__came_from'): - raise exceptions.Redirect, self.form['__came_from'].value + if '__came_from' in self.form: + raise 
exceptions.Redirect(self.form['__came_from'].value) def verifyLogin(self, username, password): # make sure the user exists try: self.client.userid = self.db.user.lookup(username) except KeyError: - raise exceptions.LoginError, self._('Invalid login') + raise exceptions.LoginError(self._('Invalid login')) # verify the password if not self.verifyPassword(self.client.userid, password): - raise exceptions.LoginError, self._('Invalid login') + raise exceptions.LoginError(self._('Invalid login')) # Determine whether the user has permission to log in. # Base behaviour is to check the user has "Web Access". if not self.hasPermission("Web Access"): - raise exceptions.LoginError, self._( - "You do not have permission to login") + raise exceptions.LoginError(self._( + "You do not have permission to login")) - def verifyPassword(self, userid, password): - '''Verify the password that the user has supplied''' - stored = self.db.user.get(userid, 'password') - if password == stored: + def verifyPassword(self, userid, givenpw): + '''Verify the password that the user has supplied. 
+ Optionally migrate to new password scheme if configured + ''' + db = self.db + stored = db.user.get(userid, 'password') + if givenpw == stored: + if db.config.WEB_MIGRATE_PASSWORDS and stored.needs_migration(): + newpw = password.Password(givenpw, config=db.config) + db.user.set(userid, password=newpw) + db.commit() return 1 - if not password and not stored: + if not givenpw and not stored: return 1 return 0 @@ -1067,9 +1081,9 @@ # check permission to view this property on this item if not self.hasPermission('View', itemid=itemid, classname=request.classname, property=name): - raise exceptions.Unauthorised, self._( + raise exceptions.Unauthorised(self._( 'You do not have permission to view %(class)s' - ) % {'class': request.classname} + ) % {'class': request.classname}) row.append(str(klass.get(itemid, name))) self.client._socket_op(writer.writerow, row) @@ -1102,7 +1116,7 @@ def execute_cgi(self): args = {} - for key in self.form.keys(): + for key in self.form: args[key] = self.form.getvalue(key) self.permission(args) return self.handle(args) Modified: tracker/roundup-src/roundup/cgi/cgitb.py ============================================================================== --- tracker/roundup-src/roundup/cgi/cgitb.py (original) +++ tracker/roundup-src/roundup/cgi/cgitb.py Thu Aug 4 15:46:52 2011 @@ -37,9 +37,7 @@ def niceDict(indent, dict): l = [] - keys = dict.keys() - keys.sort() - for k in keys: + for k in sorted(dict): v = dict[k] l.append('%s%s'%(k, cgi.escape(repr(v)))) @@ -59,7 +57,7 @@ t.reverse() for frame, file, lnum, func, lines, index in t: args, varargs, varkw, locals = inspect.getargvalues(frame) - if locals.has_key('__traceback_info__'): + if '__traceback_info__' in locals: ti = locals['__traceback_info__'] if isinstance(ti, TraversalError): s = [] @@ -72,7 +70,7 @@ ) % {'name': ti.name, 'path': s}) else: l.append(_('
  • In %s
  • ') % esc(str(ti))) - if locals.has_key('__traceback_supplement__'): + if '__traceback_supplement__' in locals: ts = locals['__traceback_supplement__'] if len(ts) == 2: supp, context = ts @@ -111,8 +109,8 @@ def html(context=5, i18n=None): _ = get_translator(i18n) - etype, evalue = sys.exc_type, sys.exc_value - if type(etype) is types.ClassType: + etype, evalue = sys.exc_info()[0], sys.exc_info()[1] + if type(etype) is type: etype = etype.__name__ pyver = 'Python ' + string.split(sys.version)[0] + '
    ' + sys.executable head = pydoc.html.heading( @@ -169,13 +167,13 @@ lvals = [] for name in names: if name in frame.f_code.co_varnames: - if locals.has_key(name): + if name in locals: value = pydoc.html.repr(locals[name]) else: value = _('undefined') name = '%s' % name else: - if frame.f_globals.has_key(name): + if name in frame.f_globals: value = pydoc.html.repr(frame.f_globals[name]) else: value = _('undefined') Modified: tracker/roundup-src/roundup/cgi/client.py ============================================================================== --- tracker/roundup-src/roundup/cgi/client.py (original) +++ tracker/roundup-src/roundup/cgi/client.py Thu Aug 4 15:46:52 2011 @@ -2,11 +2,9 @@ """ __docformat__ = 'restructuredtext' -import base64, binascii, cgi, codecs, httplib, mimetypes, os -import quopri, random, re, rfc822, stat, sys, time, urllib, urlparse -import Cookie, socket, errno -from Cookie import CookieError, BaseCookie, SimpleCookie -from cStringIO import StringIO +import base64, binascii, cgi, codecs, mimetypes, os +import quopri, random, re, rfc822, stat, sys, time +import socket, errno from roundup import roundupdb, date, hyperdb, password from roundup.cgi import templating, cgitb, TranslationService @@ -18,6 +16,12 @@ from roundup.cgi import accept_language from roundup import xmlrpc +from roundup.anypy.cookie_ import CookieError, BaseCookie, SimpleCookie, \ + get_cookie_date +from roundup.anypy.io_ import StringIO +from roundup.anypy import http_ +from roundup.anypy import urllib_ + def initialiseSecurity(security): '''Create some Permissions and Roles on the security object @@ -43,7 +47,7 @@ def clean_message_callback(match, ok={'a':1,'i':1,'b':1,'br':1}): """ Strip all non
    ,, and
    tags from a string """ - if ok.has_key(match.group(3).lower()): + if match.group(3).lower() in ok: return match.group(1) return '<%s>'%match.group(2) @@ -293,14 +297,14 @@ # this is the "cookie path" for this tracker (ie. the path part of # the "base" url) - self.cookie_path = urlparse.urlparse(self.base)[2] + self.cookie_path = urllib_.urlparse(self.base)[2] # cookies to set in http responce # {(path, name): (value, expire)} self._cookies = {} # see if we need to re-parse the environment for the form (eg Zope) if form is None: - self.form = cgi.FieldStorage(environ=env) + self.form = cgi.FieldStorage(fp=request.rfile, environ=env) else: self.form = form @@ -380,6 +384,7 @@ self.determine_language() # Open the database as the correct user. self.determine_user() + self.check_anonymous_access() # Call the appropriate XML-RPC method. handler = xmlrpc.RoundupDispatcher(self.db, @@ -437,6 +442,11 @@ # figure out the context and desired content template self.determine_context() + # if we've made it this far the context is to a bit of + # Roundup's real web interface (not a file being served up) + # so do the Anonymous Web Acess check now + self.check_anonymous_access() + # possibly handle a form submit action (may change self.classname # and self.template, and may also append error/ok_messages) html = self.handle_action() @@ -495,12 +505,12 @@ # authorization, send back a response that will cause the # browser to prompt the user again. 
if self.instance.config.WEB_HTTP_AUTH: - self.response_code = httplib.UNAUTHORIZED + self.response_code = http_.client.UNAUTHORIZED realm = self.instance.config.TRACKER_NAME self.setHeader("WWW-Authenticate", "Basic realm=\"%s\"" % realm) else: - self.response_code = httplib.FORBIDDEN + self.response_code = http_.client.FORBIDDEN self.renderFrontPage(message) except Unauthorised, message: # users may always see the front page @@ -520,15 +530,15 @@ # we can't map the URL to a class we know about # reraise the NotFound and let roundup_server # handle it - raise NotFound, e + raise NotFound(e) except FormError, e: self.error_message.append(self._('Form Error: ') + str(e)) self.write_html(self.renderContext()) except: # Something has gone badly wrong. Therefore, we should # make sure that the response code indicates failure. - if self.response_code == httplib.OK: - self.response_code = httplib.INTERNAL_SERVER_ERROR + if self.response_code == http_.client.OK: + self.response_code = http_.client.INTERNAL_SERVER_ERROR # Help the administrator work out what went wrong. html = ("

    Traceback

    " + cgitb.html(i18n=self.translator) @@ -611,12 +621,12 @@ """ # look for client charset charset_parameter = 0 - if self.form.has_key('@charset'): + if '@charset' in self.form: charset = self.form['@charset'].value if charset.lower() == "none": charset = "" charset_parameter = 1 - elif self.cookie.has_key('roundup_charset'): + elif 'roundup_charset' in self.cookie: charset = self.cookie['roundup_charset'].value else: charset = None @@ -653,7 +663,7 @@ uc = int(num) return unichr(uc) - for field_name in self.form.keys(): + for field_name in self.form: field = self.form[field_name] if (field.type == 'text/plain') and not field.filename: try: @@ -668,12 +678,12 @@ # look for language parameter # then for language cookie # last for the Accept-Language header - if self.form.has_key("@language"): + if "@language" in self.form: language = self.form["@language"].value if language.lower() == "none": language = "" self.add_cookie("roundup_language", language) - elif self.cookie.has_key("roundup_language"): + elif "roundup_language" in self.cookie: language = self.cookie["roundup_language"].value elif self.instance.config["WEB_USE_BROWSER_LANGUAGE"]: hal = self.env.get('HTTP_ACCEPT_LANGUAGE') @@ -701,7 +711,7 @@ user = None # first up, try http authorization if enabled if self.instance.config['WEB_HTTP_AUTH']: - if self.env.has_key('REMOTE_USER'): + if 'REMOTE_USER' in self.env: # we have external auth (e.g. 
by Apache) user = self.env['REMOTE_USER'] elif self.env.get('HTTP_AUTHORIZATION', ''): @@ -745,15 +755,44 @@ # make sure the anonymous user is valid if we're using it if user == 'anonymous': self.make_user_anonymous() - if not self.db.security.hasPermission('Web Access', self.userid): - raise Unauthorised, self._("Anonymous users are not " - "allowed to use the web interface") else: self.user = user # reopen the database as the correct user self.opendb(self.user) + def check_anonymous_access(self): + """Check that the Anonymous user is actually allowed to use the web + interface and short-circuit all further processing if they're not. + """ + # allow Anonymous to use the "login" and "register" actions (noting + # that "register" has its own "Register" permission check) + + if ':action' in self.form: + action = self.form[':action'] + elif '@action' in self.form: + action = self.form['@action'] + else: + action = '' + if isinstance(action, list): + raise SeriousError('broken form: multiple @action values submitted') + elif action != '': + action = action.value.lower() + if action in ('login', 'register'): + return + + # allow Anonymous to view the "user" "register" template if they're + # allowed to register + if (self.db.security.hasPermission('Register', self.userid, 'user') + and self.classname == 'user' and self.template == 'register'): + return + + # otherwise for everything else + if self.user == 'anonymous': + if not self.db.security.hasPermission('Web Access', self.userid): + raise Unauthorised(self._("Anonymous users are not " + "allowed to use the web interface")) + def opendb(self, username): """Open the database and set the current user. 
@@ -826,7 +865,7 @@ # see if a template or messages are specified template_override = ok_message = error_message = None - for key in self.form.keys(): + for key in self.form: if self.FV_TEMPLATE.match(key): template_override = self.form[key].value elif self.FV_OK_MESSAGE.match(key): @@ -851,12 +890,12 @@ self.template = '' return elif path[0] in ('_file', '@@file'): - raise SendStaticFile, os.path.join(*path[1:]) + raise SendStaticFile(os.path.join(*path[1:])) else: self.classname = path[0] if len(path) > 1: # send the file identified by the designator in path[0] - raise SendFile, path[0] + raise SendFile(path[0]) # see if we got a designator m = dre.match(self.classname) @@ -866,13 +905,13 @@ try: klass = self.db.getclass(self.classname) except KeyError: - raise NotFound, '%s/%s'%(self.classname, self.nodeid) + raise NotFound('%s/%s'%(self.classname, self.nodeid)) if long(self.nodeid) > 2**31: # Postgres will complain with a ProgrammingError # if we try to pass in numbers that are too large - raise NotFound, '%s/%s'%(self.classname, self.nodeid) + raise NotFound('%s/%s'%(self.classname, self.nodeid)) if not klass.hasnode(self.nodeid): - raise NotFound, '%s/%s'%(self.classname, self.nodeid) + raise NotFound('%s/%s'%(self.classname, self.nodeid)) # with a designator, we default to item view self.template = 'item' else: @@ -883,7 +922,7 @@ try: self.db.getclass(self.classname) except KeyError: - raise NotFound, self.classname + raise NotFound(self.classname) # see if we have a template override if template_override is not None: @@ -894,34 +933,39 @@ """ m = dre.match(str(designator)) if not m: - raise NotFound, str(designator) + raise NotFound(str(designator)) classname, nodeid = m.group(1), m.group(2) try: klass = self.db.getclass(classname) except KeyError: # The classname was not valid. 
- raise NotFound, str(designator) - + raise NotFound(str(designator)) + + # perform the Anonymous user access check + self.check_anonymous_access() # make sure we have the appropriate properties props = klass.getprops() - if not props.has_key('type'): - raise NotFound, designator - if not props.has_key('content'): - raise NotFound, designator + if 'type' not in props: + raise NotFound(designator) + if 'content' not in props: + raise NotFound(designator) # make sure we have permission if not self.db.security.hasPermission('View', self.userid, classname, 'content', nodeid): - raise Unauthorised, self._("You are not allowed to view " - "this file.") + raise Unauthorised(self._("You are not allowed to view " + "this file.")) - # MvL 20100404: catch IndexError + # MvL 20100404: catch IndexError (issue #326) try: mime_type = klass.get(nodeid, 'type') except IndexError: - raise NotFound, designator + raise NotFound(designator) + # Can happen for msg class: + if not mime_type: + mime_type = 'text/plain' # if the mime_type is HTML-ish then make sure we're allowed to serve up # HTML-ish content @@ -969,7 +1013,7 @@ if os.path.isfile(filename) and filename.startswith(prefix): break else: - raise NotFound, file + raise NotFound(file) # last-modified time lmt = os.stat(filename)[stat.ST_MTIME] @@ -998,7 +1042,7 @@ # XXX see which interfaces set this #if hasattr(self.request, 'headers'): #ims = self.request.headers.getheader('if-modified-since') - if self.env.has_key('HTTP_IF_MODIFIED_SINCE'): + if 'HTTP_IF_MODIFIED_SINCE' in self.env: # cgi will put the header in the env var ims = self.env['HTTP_IF_MODIFIED_SINCE'] if ims: @@ -1022,7 +1066,7 @@ message['Content-type'] = 'text/html; charset=utf-8' message.set_payload(content) encode_quopri(message) - self.mailer.smtp_send(to, str(message)) + self.mailer.smtp_send(to, message.as_string()) def renderFrontPage(self, message): """Return the front page of the tracker.""" @@ -1067,9 +1111,9 @@ result = result.replace('', s) return 
result except templating.NoTemplate, message: - return '%s'%message + return '%s'%cgi.escape(str(message)) except templating.Unauthorised, message: - raise Unauthorised, str(message) + raise Unauthorised(cgi.escape(str(message))) except: # everything else if self.instance.config.WEB_DEBUG: @@ -1087,7 +1131,7 @@ # receive an error message, and the adminstrator will # receive a traceback, albeit with less information # than the one we tried to generate above. - raise exc_info[0], exc_info[1], exc_info[2] + raise exc_info[0](exc_info[1]).with_traceback(exc_info[2]) # these are the actions that are available actions = ( @@ -1117,13 +1161,18 @@ We explicitly catch Reject and ValueError exceptions and present their messages to the user. """ - if self.form.has_key(':action'): - action = self.form[':action'].value.lower() - elif self.form.has_key('@action'): - action = self.form['@action'].value.lower() + if ':action' in self.form: + action = self.form[':action'] + elif '@action' in self.form: + action = self.form['@action'] else: return None + if isinstance(action, list): + raise SeriousError('broken form: multiple @action values submitted') + else: + action = action.value.lower() + try: action_klass = self.get_action_class(action) @@ -1139,7 +1188,7 @@ def get_action_class(self, action_name): if (hasattr(self.instance, 'cgi_actions') and - self.instance.cgi_actions.has_key(action_name)): + action_name in self.instance.cgi_actions): # tracker-defined action action_klass = self.instance.cgi_actions[action_name] else: @@ -1148,7 +1197,7 @@ if name == action_name: break else: - raise ValueError, 'No such action "%s"'%action_name + raise ValueError('No such action "%s"'%action_name) return action_klass def _socket_op(self, call, *args, **kwargs): @@ -1188,7 +1237,7 @@ def write_html(self, content): if not self.headers_done: # at this point, we are sure about Content-Type - if not self.additional_headers.has_key('Content-Type'): + if 'Content-Type' not in 
self.additional_headers: self.additional_headers['Content-Type'] = \ 'text/html; charset=%s' % self.charset self.header() @@ -1350,14 +1399,14 @@ return None # Return code 416 with a Content-Range header giving the # allowable range. - self.response_code = httplib.REQUESTED_RANGE_NOT_SATISFIABLE + self.response_code = http_.client.REQUESTED_RANGE_NOT_SATISFIABLE self.setHeader("Content-Range", "bytes */%d" % length) return None # RFC 2616 10.2.7: 206 Partial Content # # Tell the client that we are honoring the Range request by # indicating that we are providing partial content. - self.response_code = httplib.PARTIAL_CONTENT + self.response_code = http_.client.PARTIAL_CONTENT # RFC 2616 14.16: Content-Range # # Tell the client what data we are providing. @@ -1411,7 +1460,7 @@ # If the client doesn't actually want the body, or if we are # indicating an invalid range. if (self.env['REQUEST_METHOD'] == 'HEAD' - or self.response_code == httplib.REQUESTED_RANGE_NOT_SATISFIABLE): + or self.response_code == http_.client.REQUESTED_RANGE_NOT_SATISFIABLE): return # Use the optimized "sendfile" operation, if possible. 
if hasattr(self.request, "sendfile"): @@ -1446,12 +1495,12 @@ if headers.get('Content-Type', 'text/html') == 'text/html': headers['Content-Type'] = 'text/html; charset=utf-8' - headers = headers.items() + headers = list(headers.items()) - for ((path, name), (value, expire)) in self._cookies.items(): + for ((path, name), (value, expire)) in self._cookies.iteritems(): cookie = "%s=%s; Path=%s;"%(name, value, path) if expire is not None: - cookie += " expires=%s;"%Cookie._getdate(expire) + cookie += " expires=%s;"%get_cookie_date(expire) headers.append(('Set-Cookie', cookie)) self._socket_op(self.request.start_response, headers, response) Modified: tracker/roundup-src/roundup/cgi/form_parser.py ============================================================================== --- tracker/roundup-src/roundup/cgi/form_parser.py (original) +++ tracker/roundup-src/roundup/cgi/form_parser.py Thu Aug 4 15:46:52 2011 @@ -383,7 +383,7 @@ raise FormError, self._('Password and confirmation text ' 'do not match') try: - value = password.Password(value) + value = password.Password(value, config=self.db.config) except hyperdb.HyperdbValueError, msg: raise FormError, msg Modified: tracker/roundup-src/roundup/cgi/templating.py ============================================================================== --- tracker/roundup-src/roundup/cgi/templating.py (original) +++ tracker/roundup-src/roundup/cgi/templating.py Thu Aug 4 15:46:52 2011 @@ -27,6 +27,8 @@ from roundup import i18n from roundup.i18n import _ +from KeywordsExpr import render_keywords_expression_editor + try: import cPickle as pickle except ImportError: @@ -115,9 +117,9 @@ if os.path.exists(src): return (src, generic) - raise NoTemplate, 'No template file exists for templating "%s" '\ + raise NoTemplate('No template file exists for templating "%s" ' 'with template "%s" (neither "%s" nor "%s")'%(name, view, - filename, generic) + filename, generic)) class Templates: templates = {} @@ -520,20 +522,23 @@ def is_edit_ok(self): 
""" Is the user allowed to Create the current class? """ - return self._db.security.hasPermission('Create', self._client.userid, - self._classname) + perm = self._db.security.hasPermission + return perm('Web Access', self._client.userid) and perm('Create', + self._client.userid, self._classname) def is_retire_ok(self): """ Is the user allowed to retire items of the current class? """ - return self._db.security.hasPermission('Retire', self._client.userid, - self._classname) + perm = self._db.security.hasPermission + return perm('Web Access', self._client.userid) and perm('Retire', + self._client.userid, self._classname) def is_view_ok(self): """ Is the user allowed to View the current class? """ - return self._db.security.hasPermission('View', self._client.userid, - self._classname) + perm = self._db.security.hasPermission + return perm('Web Access', self._client.userid) and perm('View', + self._client.userid, self._classname) def is_only_view_ok(self): """ Is the user only allowed to View (ie. not Create) the current class? 
@@ -562,10 +567,7 @@ for klass, htmlklass in propclasses: if not isinstance(prop, klass): continue - if isinstance(prop, hyperdb.Multilink): - value = [] - else: - value = None + value = prop.get_default_value() return htmlklass(self._client, self._classname, None, prop, item, value, self._anonymous) @@ -598,13 +600,10 @@ l = [] for name, prop in self._props.items(): for klass, htmlklass in propclasses: - if isinstance(prop, hyperdb.Multilink): - value = [] - else: - value = None if isinstance(prop, klass): + value = prop.get_default_value() l.append(htmlklass(self._client, self._classname, '', - prop, name, value, self._anonymous)) + prop, name, value, self._anonymous)) if sort: l.sort(lambda a,b:cmp(a._name, b._name)) return l @@ -620,6 +619,8 @@ # check perms check = self._client.db.security.hasPermission userid = self._client.userid + if not check('Web Access', userid): + return [] l = [HTMLItem(self._client, self._classname, id) for id in l if check('View', userid, self._classname, itemid=id)] @@ -634,11 +635,14 @@ writer = csv.writer(s) writer.writerow(props) check = self._client.db.security.hasPermission + userid = self._client.userid + if not check('Web Access', userid): + return '' for nodeid in self._klass.list(): l = [] for name in props: # check permission to view this property on this item - if not check('View', self._client.userid, itemid=nodeid, + if not check('View', userid, itemid=nodeid, classname=self._klass.classname, property=name): raise Unauthorised('view', self._klass.classname, translator=self._client.translator) @@ -665,13 +669,23 @@ "request" takes precedence over the other three arguments. 
""" + security = self._db.security + userid = self._client.userid if request is not None: + # for a request we asume it has already been + # security-filtered filterspec = request.filterspec sort = request.sort group = request.group + else: + cn = self.classname + filterspec = security.filterFilterspec(userid, cn, filterspec) + sort = security.filterSortspec(userid, cn, sort) + group = security.filterSortspec(userid, cn, group) - check = self._db.security.hasPermission - userid = self._client.userid + check = security.hasPermission + if not check('Web Access', userid): + return [] l = [HTMLItem(self._client, self.classname, id) for id in self._klass.filter(None, filterspec, sort, group) @@ -801,20 +815,23 @@ def is_edit_ok(self): """ Is the user allowed to Edit this item? """ - return self._db.security.hasPermission('Edit', self._client.userid, - self._classname, itemid=self._nodeid) + perm = self._db.security.hasPermission + return perm('Web Access', self._client.userid) and perm('Edit', + self._client.userid, self._classname, itemid=self._nodeid) def is_retire_ok(self): """ Is the user allowed to Reture this item? """ - return self._db.security.hasPermission('Retire', self._client.userid, - self._classname, itemid=self._nodeid) + perm = self._db.security.hasPermission + return perm('Web Access', self._client.userid) and perm('Retire', + self._client.userid, self._classname, itemid=self._nodeid) def is_view_ok(self): """ Is the user allowed to View this item? 
""" - if self._db.security.hasPermission('View', self._client.userid, - self._classname, itemid=self._nodeid): + perm = self._db.security.hasPermission + if perm('Web Access', self._client.userid) and perm('View', + self._client.userid, self._classname, itemid=self._nodeid): return 1 return self.is_edit_ok() @@ -1091,6 +1108,13 @@ cell[-1] += ' -> %s'%current[k] current[k] = val + elif isinstance(prop, hyperdb.Password) and args[k] is not None: + val = args[k].dummystr() + cell.append('%s: %s'%(self._(k), val)) + if current.has_key(k): + cell[-1] += ' -> %s'%current[k] + current[k] = val + elif not args[k]: if current.has_key(k): cell.append('%s: %s'%(self._(k), current[k])) @@ -1205,12 +1229,9 @@ return self._db.security.hasPermission(permission, self._nodeid, classname, property, itemid) - def hasRole(self, rolename): - """Determine whether the user has the Role.""" - roles = self._db.user.get(self._nodeid, 'roles').split(',') - for role in roles: - if role.strip() == rolename: return True - return False + def hasRole(self, *rolenames): + """Determine whether the user has any role in rolenames.""" + return self._db.user.has_role(self._nodeid, *rolenames) def HTMLItem(client, classname, nodeid, anonymous=0): if classname == 'user': @@ -1240,7 +1261,12 @@ self._anonymous = anonymous self._name = name if not anonymous: - self._formname = '%s%s@%s'%(classname, nodeid, name) + if nodeid: + self._formname = '%s%s@%s'%(classname, nodeid, name) + else: + # This case occurs when creating a property for a + # non-anonymous class. + self._formname = '%s@%s'%(classname, name) else: self._formname = name @@ -1266,8 +1292,9 @@ HTMLInputMixin.__init__(self) def __repr__(self): - return ''%(id(self), self._formname, - self._prop, self._value) + classname = self.__class__.__name__ + return '<%s(0x%x) %s %r %r>'%(classname, id(self), self._formname, + self._prop, self._value) def __str__(self): return self.plain() def __cmp__(self, other): @@ -1287,19 +1314,22 @@ property. 
Check "Create" for new items, or "Edit" for existing ones. """ + perm = self._db.security.hasPermission + userid = self._client.userid if self._nodeid: - return self._db.security.hasPermission('Edit', self._client.userid, - self._classname, self._name, self._nodeid) - return self._db.security.hasPermission('Create', self._client.userid, - self._classname, self._name) or \ - self._db.security.hasPermission('Register', self._client.userid, - self._classname, self._name) + if not perm('Web Access', userid): + return False + return perm('Edit', userid, self._classname, self._name, + self._nodeid) + return perm('Create', userid, self._classname, self._name) or \ + perm('Register', userid, self._classname, self._name) def is_view_ok(self): """ Is the user allowed to View the current class? """ - if self._db.security.hasPermission('View', self._client.userid, - self._classname, self._name, self._nodeid): + perm = self._db.security.hasPermission + if perm('Web Access', self._client.userid) and perm('View', + self._client.userid, self._classname, self._name, self._nodeid): return 1 return self.is_edit_ok() @@ -1538,7 +1568,10 @@ if self._value is None: return '' - return self._('*encrypted*') + value = self._value.dummystr() + if escape: + value = cgi.escape(value) + return value def field(self, size=30, **kwargs): """ Render a form edit field for the property. 
@@ -2083,9 +2116,10 @@ check = self._db.security.hasPermission userid = self._client.userid classname = self._prop.classname - for value in values: - if check('View', userid, classname, itemid=value): - yield HTMLItem(self._client, classname, value) + if check('Web Access', userid): + for value in values: + if check('View', userid, classname, itemid=value): + yield HTMLItem(self._client, classname, value) def __iter__(self): """ iterate and return a new HTMLItem @@ -2149,16 +2183,19 @@ return self.plain(escape=1) linkcl = self._db.getclass(self._prop.classname) - value = self._value[:] - # map the id to the label property - if not linkcl.getkey(): - showid=1 - if not showid: - k = linkcl.labelprop(1) - value = lookupKeys(linkcl, k, value) - value = ','.join(value) - return self.input(name=self._formname, size=size, value=value, - **kwargs) + + if 'value' not in kwargs: + value = self._value[:] + # map the id to the label property + if not linkcl.getkey(): + showid=1 + if not showid: + k = linkcl.labelprop(1) + value = lookupKeys(linkcl, k, value) + value = ','.join(value) + kwargs["value"] = value + + return self.input(name=self._formname, size=size, **kwargs) def menu(self, size=None, height=None, showid=0, additional=[], value=None, sort_on=None, html_kwargs = {}, **conditions): @@ -2292,13 +2329,19 @@ def make_sort_function(db, classname, sort_on=None): - """Make a sort function for a given class - """ + """Make a sort function for a given class. + + The list being sorted may contain mixed ids and labels. 
+ """ linkcl = db.getclass(classname) if sort_on is None: sort_on = linkcl.orderprop() def sortfunc(a, b): - return cmp(linkcl.get(a, sort_on), linkcl.get(b, sort_on)) + if num_re.match(a): + a = linkcl.get(a, sort_on) + if num_re.match(b): + b = linkcl.get(b, sort_on) + return cmp(a, b) return sortfunc def handleListCGIValue(value): @@ -2427,12 +2470,16 @@ self.columns = handleListCGIValue(self.form[name]) break self.show = support.TruthDict(self.columns) + security = self._client.db.security + userid = self._client.userid # sorting and grouping self.sort = [] self.group = [] self._parse_sort(self.sort, 'sort') self._parse_sort(self.group, 'group') + self.sort = security.filterSortspec(userid, self.classname, self.sort) + self.group = security.filterSortspec(userid, self.classname, self.group) # filtering self.filter = [] @@ -2462,6 +2509,8 @@ self.filterspec[name] = handleListCGIValue(fv) else: self.filterspec[name] = fv.value + self.filterspec = security.filterFilterspec(userid, self.classname, + self.filterspec) # full-text search argument self.search_text = None @@ -2697,9 +2746,15 @@ ignore[(klass, prop)] = None return ignore - def batch(self, to_ignore='ignore'): + def batch(self, permission='View', to_ignore='ignore'): """ Return a batch object for results from the "current search" """ + check = self._client.db.security.hasPermission + userid = self._client.userid + if not check('Web Access', userid): + return Batch(self.client, [], self.pagesize, self.startwith, + classname=self.classname) + filterspec = self.filterspec sort = self.sort group = self.group @@ -2717,10 +2772,8 @@ matches = None # filter for visibility - check = self._client.db.security.hasPermission - userid = self._client.userid l = [id for id in klass.filter(matches, filterspec, sort, group) - if check('View', userid, self.classname, itemid=id)] + if check(permission, userid, self.classname, itemid=id)] # return the batch object, using IDs only return Batch(self.client, l, self.pagesize, 
self.startwith, @@ -2845,6 +2898,9 @@ raise AttributeError, name return self.client.instance.templating_utils[name] + def keywords_expressions(self, request): + return render_keywords_expression_editor(request) + def html_calendar(self, request): """Generate a HTML calendar. @@ -2858,7 +2914,9 @@ html will simply be a table. """ - date_str = request.form.getfirst("date", ".") + tz = request.client.db.getUserTimezone() + current_date = date.Date(".").local(tz) + date_str = request.form.getfirst("date", current_date) display = request.form.getfirst("display", date_str) template = request.form.getfirst("@template", "calendar") form = request.form.getfirst("form") Modified: tracker/roundup-src/roundup/cgi/wsgi_handler.py ============================================================================== --- tracker/roundup-src/roundup/cgi/wsgi_handler.py (original) +++ tracker/roundup-src/roundup/cgi/wsgi_handler.py Thu Aug 4 15:46:52 2011 @@ -10,7 +10,7 @@ import roundup.instance from roundup.cgi import TranslationService -from BaseHTTPServer import BaseHTTPRequestHandler +from BaseHTTPServer import BaseHTTPRequestHandler, DEFAULT_ERROR_MESSAGE class Writer(object): @@ -43,6 +43,14 @@ request.wfile = Writer(request) request.__wfile = None + if environ ['REQUEST_METHOD'] == 'OPTIONS': + code = 501 + message, explain = BaseHTTPRequestHandler.responses[code] + request.start_response([('Content-Type', 'text/html'), + ('Connection', 'close')], code) + request.wfile.write(DEFAULT_ERROR_MESSAGE % locals()) + return [] + tracker = roundup.instance.open(self.home, not self.debug) # need to strip the leading '/' @@ -65,9 +73,9 @@ def start_response(self, headers, response_code): """Set HTTP response code""" - description = BaseHTTPRequestHandler.responses[response_code] + message, explain = BaseHTTPRequestHandler.responses[response_code] self.__wfile = self.__start_response('%d %s'%(response_code, - description), headers) + message), headers) def get_wfile(self): if self.__wfile is 
None: Modified: tracker/roundup-src/roundup/configuration.py ============================================================================== --- tracker/roundup-src/roundup/configuration.py (original) +++ tracker/roundup-src/roundup/configuration.py Thu Aug 4 15:46:52 2011 @@ -537,6 +537,22 @@ "starting with python 2.5. Set this to a higher value if you\n" "get the error 'Error: field larger than field limit' during\n" "import."), + (IntegerNumberOption, 'password_pbkdf2_default_rounds', '10000', + "Sets the default number of rounds used when encoding passwords\n" + "using the PBKDF2 scheme. Set this to a higher value on faster\n" + "systems which want more security.\n" + "PBKDF2 (Password-Based Key Derivation Function) is a\n" + "password hashing mechanism that derives hash from the\n" + "password and a random salt. For authentication this process\n" + "is repeated with the same salt as in the stored hash.\n" + "If both hashes match, the authentication succeeds.\n" + "PBKDF2 supports a variable 'rounds' parameter which varies\n" + "the time-cost of calculating the hash - doubling the number\n" + "of rounds doubles the cpu time required to calculate it. The\n" + "purpose of this is to periodically adjust the rounds as CPUs\n" + "become faster. 
The currently enforced minimum number of\n" + "rounds is 1000.\n" + "See: http://en.wikipedia.org/wiki/PBKDF2 and RFC2898"), )), ("tracker", ( (Option, "name", "Roundup issue tracker", @@ -579,6 +595,10 @@ "Setting this option makes Roundup display error tracebacks\n" "in the user's browser rather than emailing them to the\n" "tracker admin."), + (BooleanOption, "migrate_passwords", "yes", + "Setting this option makes Roundup migrate passwords with\n" + "an insecure password-scheme to a more secure scheme\n" + "when the user logs in via the web-interface."), )), ("rdbms", ( (Option, 'name', 'roundup', @@ -604,8 +624,30 @@ (NullableOption, 'read_default_group', 'roundup', "Name of the group to use in the MySQL defaults file (.my.cnf).\n" "Only used in MySQL connections."), + (IntegerNumberOption, 'sqlite_timeout', '30', + "Number of seconds to wait when the SQLite database is locked\n" + "Default: use a 30 second timeout (extraordinarily generous)\n" + "Only used in SQLite connections."), (IntegerNumberOption, 'cache_size', '100', "Size of the node cache (in elements)"), + (BooleanOption, "allow_create", "yes", + "Setting this option to 'no' protects the database against table creations."), + (BooleanOption, "allow_alter", "yes", + "Setting this option to 'no' protects the database against table alterations."), + (BooleanOption, "allow_drop", "yes", + "Setting this option to 'no' protects the database against table drops."), + (NullableOption, 'template', '', + "Name of the PostgreSQL template for database creation.\n" + "For database creation the template used has to match\n" + "the character encoding used (UTF8), there are different\n" + "PostgreSQL installations using different templates with\n" + "different encodings. 
If you get an error:\n" + " new encoding (UTF8) is incompatible with the encoding of\n" + " the template database (SQL_ASCII)\n" + " HINT: Use the same encoding as in the template database,\n" + " or use template0 as template.\n" + "then set this option to the template name given in the\n" + "error message."), ), "Settings in this section are used" " by RDBMS backends only" ), @@ -725,6 +767,10 @@ "will match an issue for the interval after the issue's\n" "creation or last activity. The interval is a standard\n" "Roundup interval."), + (BooleanOption, "subject_updates_title", "yes", + "Update issue title if incoming subject of email is different.\n" + "Setting this to \"no\" will ignore the title part of" + " the subject\nof incoming email messages.\n"), (RegExpOption, "refwd_re", "(\s*\W?\s*(fw|fwd|re|aw|sv|ang)\W)+", "Regular expression matching a single reply or forward\n" "prefix prepended by the mailer. This is explicitly\n" @@ -740,6 +786,10 @@ "Regular expression matching end of line."), (RegExpOption, "blankline_re", r"[\r\n]+\s*[\r\n]+", "Regular expression matching a blank line."), + (BooleanOption, "unpack_rfc822", "no", + "Unpack attached messages (encoded as message/rfc822 in MIME)\n" + "as multiple parts attached as files to the issue, if not\n" + "set we handle message/rfc822 attachments as a single file."), (BooleanOption, "ignore_alternatives", "no", "When parsing incoming mails, roundup uses the first\n" "text/plain part it finds. 
If this part is inside a\n" @@ -1249,6 +1299,14 @@ if home_dir is None: self.init_logging() + def copy(self): + new = CoreConfig() + new.sections = list(self.sections) + new.section_descriptions = dict(self.section_descriptions) + new.section_options = dict(self.section_options) + new.options = dict(self.options) + return new + def _get_unset_options(self): need_set = Config._get_unset_options(self) # remove MAIL_PASSWORD if MAIL_USER is empty @@ -1278,8 +1336,8 @@ return _file = self["LOGGING_FILENAME"] - # set file & level on the root logger - logger = logging.getLogger() + # set file & level on the roundup logger + logger = logging.getLogger('roundup') if _file: hdlr = logging.FileHandler(_file) else: @@ -1288,6 +1346,9 @@ '%(asctime)s %(levelname)s %(message)s') hdlr.setFormatter(formatter) # no logging API to remove all existing handlers!?! + for h in logger.handlers: + h.close() + logger.removeHandler(hdlr) logger.handlers = [hdlr] logger.setLevel(logging._levelNames[self["LOGGING_LEVEL"] or "ERROR"]) Modified: tracker/roundup-src/roundup/date.py ============================================================================== --- tracker/roundup-src/roundup/date.py (original) +++ tracker/roundup-src/roundup/date.py Thu Aug 4 15:46:52 2011 @@ -249,14 +249,22 @@ serving as translation functions. """ self.setTranslator(translator) + # Python 2.3+ datetime object + # common case when reading from database: avoid double-conversion + if isinstance(spec, datetime.datetime): + if offset == 0: + self.year, self.month, self.day, self.hour, self.minute, \ + self.second = spec.timetuple()[:6] + else: + TZ = get_timezone(tz) + self.year, self.month, self.day, self.hour, self.minute, \ + self.second = TZ.localize(spec).utctimetuple()[:6] + self.second += spec.microsecond/1000000. 
+ return + if type(spec) == type(''): self.set(spec, offset=offset, add_granularity=add_granularity) return - elif isinstance(spec, datetime.datetime): - # Python 2.3+ datetime object - y,m,d,H,M,S,x,x,x = spec.timetuple() - S += spec.microsecond/1000000. - spec = (y,m,d,H,M,S,x,x,x) elif hasattr(spec, 'tuple'): spec = spec.tuple() elif isinstance(spec, Date): @@ -522,6 +530,7 @@ def local(self, offset): """ Return this date as yyyy-mm-dd.hh:mm:ss in a local time zone. + The offset is a pytz tz offset if pytz is installed. """ y, m, d, H, M, S = _utc_to_local(self.year, self.month, self.day, self.hour, self.minute, self.second, offset) @@ -718,14 +727,11 @@ def __cmp__(self, other): """Compare this interval to another interval.""" + if other is None: # we are always larger than None return 1 - for attr in 'sign year month day hour minute second'.split(): - r = cmp(getattr(self, attr), getattr(other, attr)) - if r: - return r - return 0 + return cmp(self.as_seconds(), other.as_seconds()) def __str__(self): """Return this interval as a string.""" Modified: tracker/roundup-src/roundup/dist/command/build.py ============================================================================== --- tracker/roundup-src/roundup/dist/command/build.py (original) +++ tracker/roundup-src/roundup/dist/command/build.py Thu Aug 4 15:46:52 2011 @@ -32,31 +32,29 @@ manifest = [l.strip() for l in f.readlines()] finally: f.close() - err = [line for line in manifest if not os.path.exists(line)] - err.sort() + err = set([line for line in manifest if not os.path.exists(line)]) # ignore auto-generated files - if err == ['roundup-admin', 'roundup-demo', 'roundup-gettext', - 'roundup-mailgw', 'roundup-server']: - err = [] + err = err - set(['roundup-admin', 'roundup-demo', 'roundup-gettext', + 'roundup-mailgw', 'roundup-server', 'roundup-xmlrpc-server']) if err: n = len(manifest) print '\n*** SOURCE WARNING: There are files missing (%d/%d found)!'%( n-len(err), n) print 'Missing:', '\nMissing: 
'.join(err) +def build_message_files(command): + """For each locale/*.po, build .mo file in target locale directory""" + for (_src, _dst) in list_message_files(): + _build_dst = os.path.join("build", _dst) + command.mkpath(os.path.dirname(_build_dst)) + command.announce("Compiling %s -> %s" % (_src, _build_dst)) + msgfmt.make(_src, _build_dst) -class build(base): - def build_message_files(self): - """For each locale/*.po, build .mo file in target locale directory""" - for (_src, _dst) in list_message_files(): - _build_dst = os.path.join("build", _dst) - self.mkpath(os.path.dirname(_build_dst)) - self.announce("Compiling %s -> %s" % (_src, _build_dst)) - msgfmt.make(_src, _build_dst) +class build(base): def run(self): check_manifest() - self.build_message_files() + build_message_files(self) base.run(self) Added: tracker/roundup-src/roundup/dist/command/install_lib.py ============================================================================== --- (empty file) +++ tracker/roundup-src/roundup/dist/command/install_lib.py Thu Aug 4 15:46:52 2011 @@ -0,0 +1,9 @@ +from roundup.dist.command.build import build_message_files, check_manifest +from distutils.command.install_lib import install_lib as base + +class install_lib(base): + + def run(self): + check_manifest() + build_message_files(self) + base.run(self) Modified: tracker/roundup-src/roundup/hyperdb.py ============================================================================== --- tracker/roundup-src/roundup/hyperdb.py (original) +++ tracker/roundup-src/roundup/hyperdb.py Thu Aug 4 15:46:52 2011 @@ -35,11 +35,15 @@ # class _Type(object): """A roundup property type.""" - def __init__(self, required=False): + def __init__(self, required=False, default_value = None): self.required = required + self.__default_value = default_value def __repr__(self): ' more useful for dumps ' return '<%s.%s>'%(self.__class__.__module__, self.__class__.__name__) + def get_default_value(self): + """The default value when creating a new 
instance of this property.""" + return self.__default_value def sort_repr (self, cls, val, name): """Representation used for sorting. This should be a python built-in type, otherwise sorting will take ages. Note that @@ -50,8 +54,8 @@ class String(_Type): """An object designating a String property.""" - def __init__(self, indexme='no', required=False): - super(String, self).__init__(required) + def __init__(self, indexme='no', required=False, default_value = ""): + super(String, self).__init__(required, default_value) self.indexme = indexme == 'yes' def from_raw(self, value, propname='', **kw): """fix the CRLF/CR -> LF stuff""" @@ -72,24 +76,12 @@ def from_raw(self, value, **kw): if not value: return None - m = password.Password.pwre.match(value) - if m: - # password is being given to us encrypted - p = password.Password() - p.scheme = m.group(1) - if p.scheme not in 'SHA crypt plaintext'.split(): - raise HyperdbValueError, \ - ('property %s: unknown encryption scheme %r') %\ - (kw['propname'], p.scheme) - p.password = m.group(2) - value = p - else: - try: - value = password.Password(value) - except password.PasswordValueError, message: - raise HyperdbValueError, \ - _('property %s: %s')%(kw['propname'], message) - return value + try: + return password.Password(encrypted=value, strict=True) + except password.PasswordValueError, message: + raise HyperdbValueError, \ + _('property %s: %s')%(kw['propname'], message) + def sort_repr (self, cls, val, name): if not val: return val @@ -97,8 +89,9 @@ class Date(_Type): """An object designating a Date property.""" - def __init__(self, offset=None, required=False): - super(Date, self).__init__(required) + def __init__(self, offset=None, required=False, default_value = None): + super(Date, self).__init__(required = required, + default_value = default_value) self._offset = offset def offset(self, db): if self._offset is not None: @@ -136,10 +129,11 @@ class _Pointer(_Type): """An object designating a Pointer property that 
links or multilinks to a node in a specified class.""" - def __init__(self, classname, do_journal='yes', required=False): + def __init__(self, classname, do_journal='yes', required=False, + default_value = None): """ Default is to journal link and unlink events """ - super(_Pointer, self).__init__(required) + super(_Pointer, self).__init__(required, default_value) self.classname = classname self.do_journal = do_journal == 'yes' def __repr__(self): @@ -175,6 +169,14 @@ "do_journal" indicates whether the linked-to nodes should have 'link' and 'unlink' events placed in their journal """ + + def __init__(self, classname, do_journal = 'yes', required = False): + + super(Multilink, self).__init__(classname, + do_journal, + required = required, + default_value = []) + def from_raw(self, value, db, klass, propname, itemid, **kw): if not value: return [] @@ -284,18 +286,17 @@ """ Simple tree data structure for optimizing searching of properties. Each node in the tree represents a roundup Class Property that has to be navigated for finding the given search - or sort properties. The sort_type attribute is used for - distinguishing nodes in the tree used for sorting or searching: If - it is 0 for a node, that node is not used for sorting. If it is 1, - it is used for both, sorting and searching. If it is 2 it is used - for sorting only. + or sort properties. The need_for attribute is used for + distinguishing nodes in the tree used for sorting, searching or + retrieval: The attribute is a dictionary containing one or several + of the values 'sort', 'search', 'retrieve'. The Proptree is also used for transitively searching attributes for backends that do not support transitive search (e.g. anydbm). The _val attribute with set_val is used for this. 
""" - def __init__(self, db, cls, name, props, parent = None): + def __init__(self, db, cls, name, props, parent=None, retr=False): self.db = db self.name = name self.props = props @@ -308,7 +309,7 @@ self.children = [] self.sortattr = [] self.propdict = {} - self.sort_type = 0 + self.need_for = {'search' : True} self.sort_direction = None self.sort_ids = None self.sort_ids_needed = False @@ -317,6 +318,7 @@ self.tree_sort_done = False self.propclass = None self.orderby = [] + self.sql_idx = None # index of retrieved column in sql result if parent: self.root = parent.root self.depth = parent.depth + 1 @@ -324,7 +326,7 @@ self.root = self self.seqno = 1 self.depth = 0 - self.sort_type = 1 + self.need_for['sort'] = True self.id = self.root.seqno self.root.seqno += 1 if self.cls: @@ -332,15 +334,18 @@ self.uniqname = '%s%s' % (self.cls.classname, self.id) if not self.parent: self.uniqname = self.cls.classname + if retr: + self.append_retr_props() - def append(self, name, sort_type = 0): + def append(self, name, need_for='search', retr=False): """Append a property to self.children. Will create a new propclass for the child. 
""" if name in self.propdict: pt = self.propdict[name] - if sort_type and not pt.sort_type: - pt.sort_type = 1 + pt.need_for[need_for] = True + if retr and isinstance(pt.propclass, Link): + pt.append_retr_props() return pt propclass = self.props[name] cls = None @@ -349,15 +354,24 @@ cls = self.db.getclass(propclass.classname) props = cls.getprops() child = self.__class__(self.db, cls, name, props, parent = self) - child.sort_type = sort_type + child.need_for = {need_for : True} child.propclass = propclass self.children.append(child) self.propdict[name] = child + if retr and isinstance(child.propclass, Link): + child.append_retr_props() return child + def append_retr_props(self): + """Append properties for retrieval.""" + for name, prop in self.cls.getprops(protected=1).iteritems(): + if isinstance(prop, Multilink): + continue + self.append(name, need_for='retrieve') + def compute_sort_done(self, mlseen=False): """ Recursively check if attribute is needed for sorting - (self.sort_type > 0) or all children have tree_sort_done set and + ('sort' in self.need_for) or all children have tree_sort_done set and sort_ids_needed unset: set self.tree_sort_done if one of the conditions holds. Also remove sort_ids_needed recursively once having seen a Multilink. @@ -371,7 +385,7 @@ p.compute_sort_done(mlseen) if not p.tree_sort_done: self.tree_sort_done = False - if not self.sort_type: + if 'sort' not in self.need_for: self.tree_sort_done = True if mlseen: self.tree_sort_done = False @@ -389,7 +403,7 @@ """ filterspec = {} for p in self.children: - if p.sort_type < 2: + if 'search' in p.need_for: if p.children: p.search(sort = False) filterspec[p.name] = p.val @@ -413,7 +427,7 @@ too. 
""" return [p for p in self.children - if p.sort_type > 0 and (intermediate or p.sort_direction)] + if 'sort' in p.need_for and (intermediate or p.sort_direction)] def __iter__(self): """ Yield nodes in depth-first order -- visited nodes first """ @@ -534,7 +548,6 @@ curdir = sa.sort_direction idx += 1 sortattr.append (val) - #print >> sys.stderr, "\nsortattr", sortattr sortattr = zip (*sortattr) for dir, i in reversed(zip(directions, dir_idx)): rev = dir == '-' @@ -760,6 +773,16 @@ """ +def iter_roles(roles): + ''' handle the text processing of turning the roles list + into something python can use more easily + ''' + if not roles or not roles.strip(): + raise StopIteration, "Empty roles given" + for role in [x.lower().strip() for x in roles.split(',')]: + yield role + + # # The base Class class # @@ -928,7 +951,9 @@ 'date' is a Timestamp object specifying the time of the change and 'tag' is the journaltag specified when the database was opened. """ - raise NotImplementedError + if not self.do_journal: + raise ValueError('Journalling is disabled for this class') + return self.db.getjournal(self.classname, nodeid) # Locating nodes: def hasnode(self, nodeid): @@ -1045,27 +1070,40 @@ """ raise NotImplementedError - def _proptree(self, filterspec, sortattr=[]): + def _proptree(self, filterspec, sortattr=[], retr=False): """Build a tree of all transitive properties in the given filterspec. + If we retrieve (retr is True) linked items we don't follow + across multilinks. We also don't follow if the searched value + can contain NULL values. 
""" - proptree = Proptree(self.db, self, '', self.getprops()) + proptree = Proptree(self.db, self, '', self.getprops(), retr=retr) for key, v in filterspec.iteritems(): keys = key.split('.') p = proptree + mlseen = False for k in keys: - p = p.append(k) + if isinstance (p.propclass, Multilink): + mlseen = True + isnull = v == '-1' or v is None + nullin = isinstance(v, type([])) and ('-1' in v or None in v) + r = retr and not mlseen and not isnull and not nullin + p = p.append(k, retr=r) p.val = v multilinks = {} for s in sortattr: keys = s[1].split('.') p = proptree + mlseen = False for k in keys: - p = p.append(k, sort_type = 2) + if isinstance (p.propclass, Multilink): + mlseen = True + r = retr and not mlseen + p = p.append(k, need_for='sort', retr=r) if isinstance (p.propclass, Multilink): multilinks[p] = True if p.cls: - p = p.append(p.cls.orderprop(), sort_type = 2) + p = p.append(p.cls.orderprop(), need_for='sort') if p.sort_direction: # if an orderprop is also specified explicitly continue p.sort_direction = s[0] @@ -1091,7 +1129,7 @@ for k in propname_path.split('.'): try: prop = props[k] - except KeyError, TypeError: + except (KeyError, TypeError): return default cl = getattr(prop, 'classname', None) props = None @@ -1148,7 +1186,7 @@ This implements a non-optimized version of Transitive search using _filter implemented in a backend class. A more efficient version can be implemented in the individual backends -- e.g., - an SQL backen will want to create a single SQL statement and + an SQL backend will want to create a single SQL statement and override the filter method instead of implementing _filter. """ sortattr = self._sortattr(sort = sort, group = group) @@ -1156,6 +1194,13 @@ proptree.search(search_matches) return proptree.sort() + # non-optimized filter_iter, a backend may chose to implement a + # better version that provides a real iterator that pre-fills the + # cache for each id returned. 
Note that the filter_iter doesn't + # promise to correctly sort by multilink (which isn't sane to do + # anyway). + filter_iter = filter + def count(self): """Get the number of nodes in this class. @@ -1228,6 +1273,83 @@ propnames.sort() return propnames + def import_journals(self, entries): + """Import a class's journal. + + Uses setjournal() to set the journal for each item. + Strategy for import: Sort first by id, then import journals for + each id, this way the memory footprint is a lot smaller than the + initial implementation which stored everything in a big hash by + id and then proceeded to import journals for each id.""" + properties = self.getprops() + a = [] + for l in entries: + # first element in sorted list is the (numeric) id + # in python2.4 and up we would use sorted with a key... + a.append ((int (l [0].strip ("'")), l)) + a.sort () + + + last = 0 + r = [] + for n, l in a: + nodeid, jdate, user, action, params = map(eval, l) + assert (str(n) == nodeid) + if n != last: + if r: + self.db.setjournal(self.classname, str(last), r) + last = n + r = [] + + if action == 'set': + for propname, value in params.iteritems(): + prop = properties[propname] + if value is None: + pass + elif isinstance(prop, Date): + value = date.Date(value) + elif isinstance(prop, Interval): + value = date.Interval(value) + elif isinstance(prop, Password): + value = password.JournalPassword(encrypted=value) + params[propname] = value + elif action == 'create' and params: + # old tracker with data stored in the create! + params = {} + r.append((nodeid, date.Date(jdate), user, action, params)) + if r: + self.db.setjournal(self.classname, nodeid, r) + + # + # convenience methods + # + def get_roles(self, nodeid): + """Return iterator for all roles for this nodeid. + + Yields string-processed roles. + This method can be overridden to provide a hook where we can + insert other permission models (e.g. 
get roles from database) + In standard schemas only a user has a roles property but + this may be different in customized schemas. + Note that this is the *central place* where role + processing happens! + """ + node = self.db.getnode(self.classname, nodeid) + return iter_roles(node['roles']) + + def has_role(self, nodeid, *roles): + '''See if this node has any roles that appear in roles. + + For convenience reasons we take a list. + In standard schemas only a user has a roles property but + this may be different in customized schemas. + ''' + roles = dict.fromkeys ([r.strip().lower() for r in roles]) + for role in self.get_roles(nodeid): + if role in roles: + return True + return False + class HyperdbValueError(ValueError): """ Error converting a raw value into a Hyperdb value """ Modified: tracker/roundup-src/roundup/init.py ============================================================================== --- tracker/roundup-src/roundup/init.py (original) +++ tracker/roundup-src/roundup/init.py Thu Aug 4 15:46:52 2011 @@ -176,10 +176,12 @@ finally: f.close() -def write_select_db(instance_home, backend): +def write_select_db(instance_home, backend, dbdir = 'db'): ''' Write the file that selects the backend for the tracker ''' - dbdir = os.path.join(instance_home, 'db') + # dbdir may be a relative pathname, os.path.join does the right + # thing when the second component of a join is an absolute path + dbdir = os.path.join (instance_home, dbdir) if not os.path.exists(dbdir): os.makedirs(dbdir) f = open(os.path.join(dbdir, 'backend_name'), 'w') Modified: tracker/roundup-src/roundup/instance.py ============================================================================== --- tracker/roundup-src/roundup/instance.py (original) +++ tracker/roundup-src/roundup/instance.py Thu Aug 4 15:46:52 2011 @@ -16,9 +16,15 @@ # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. # -"""Tracker handling (open tracker). +"""Top-level tracker interface. 
-Backwards compatibility for the old-style "imported" trackers. +Open a tracker with: + + >>> from roundup import instance + >>> db = instance.open('path to tracker home') + +The "db" handle you get back is the tracker's hyperdb which has the interface +described in `roundup.hyperdb.Database`. """ __docformat__ = 'restructuredtext' @@ -80,8 +86,7 @@ sys.path.remove(libdir) def get_backend_name(self): - o = __builtins__['open'] - f = o(os.path.join(self.tracker_home, 'db', 'backend_name')) + f = file(os.path.join(self.config.DATABASE, 'backend_name')) name = f.readline().strip() f.close() return name @@ -107,6 +112,9 @@ 'db': backend.Database(self.config, name) } + libdir = os.path.join(self.tracker_home, 'lib') + if os.path.isdir(libdir): + sys.path.insert(1, libdir) if self.optimize: # execute preloaded schema object exec(self.schema, vars) @@ -115,9 +123,6 @@ # use preloaded detectors detectors = self.detectors else: - libdir = os.path.join(self.tracker_home, 'lib') - if os.path.isdir(libdir): - sys.path.insert(1, libdir) # execute the schema file self._load_python('schema.py', vars) if callable (self.schema_hook): @@ -126,8 +131,8 @@ for extension in self.get_extensions('extensions'): extension(self) detectors = self.get_extensions('detectors') - if libdir in sys.path: - sys.path.remove(libdir) + if libdir in sys.path: + sys.path.remove(libdir) db = vars['db'] # apply the detectors for detector in detectors: Modified: tracker/roundup-src/roundup/mailer.py ============================================================================== --- tracker/roundup-src/roundup/mailer.py (original) +++ tracker/roundup-src/roundup/mailer.py Thu Aug 4 15:46:52 2011 @@ -9,7 +9,7 @@ from roundup import __version__ from roundup.date import get_timezone -from email.Utils import formatdate, formataddr +from email.Utils import formatdate, formataddr, specialsre, escapesre from email.Message import Message from email.Header import Header from email.Charset import Charset @@ -26,6 
+26,25 @@ del msg['Content-Transfer-Encoding'] msg['Content-Transfer-Encoding'] = 'quoted-printable' +def nice_sender_header(name, address, charset): + # construct an address header so it's as human-readable as possible + # even in the presence of a non-ASCII name part + if not name: + return address + try: + encname = name.encode('ASCII') + except UnicodeEncodeError: + # use Header to encode correctly. + encname = Header(name, charset=charset).encode() + + # the important bits of formataddr() + if specialsre.search(encname): + encname = '"%s"'%escapesre.sub(r'\\\g<0>', encname) + + # now format the header as a string - don't return a Header as anonymous + # headers play poorly with Messages (eg. won't get wrapped properly) + return '%s <%s>'%(encname, address) + class Mailer: """Roundup-specific mail sending.""" def __init__(self, config): @@ -62,14 +81,15 @@ charset = getattr(self.config, 'EMAIL_CHARSET', 'utf-8') tracker_name = unicode(self.config.TRACKER_NAME, 'utf-8') if not author: - author = formataddr((tracker_name, self.config.ADMIN_EMAIL)) + author = (tracker_name, self.config.ADMIN_EMAIL) + name = author[0] else: name = unicode(author[0], 'utf-8') try: name.encode('ascii') except UnicodeError: name = Charset(charset).header_encode(name.encode(charset)) - author = formataddr((name, author[1])) + author = nice_sender_header(name, author[1], charset) if multipart: message = MIMEMultipart() @@ -82,9 +102,7 @@ except UnicodeError: message['Subject'] = Header(subject, charset) message['To'] = ', '.join(to) - # This should not fail, since we already encoded non-ASCII - # name characters - message['From'] = author.encode('ascii') + message['From'] = author message['Date'] = formatdate(localtime=True) # add a Precedence header so autoresponders ignore us @@ -181,17 +199,22 @@ content = '\n'.join(traceback.format_exception(*sys.exc_info())) self.standard_message(to, subject, data+content) - def smtp_send(self, to, message): + def smtp_send(self, to, message, 
sender=None): """Send a message over SMTP, using roundup's config. Arguments: - to: a list of addresses usable by rfc822.parseaddr(). - message: a StringIO instance with a full message. + - sender: if not 'None', the email address to use as the + envelope sender. If 'None', the admin email is used. """ + + if not sender: + sender = self.config.ADMIN_EMAIL if self.debug: # don't send - just write to a file open(self.debug, 'a').write('FROM: %s\nTO: %s\n%s\n' % - (self.config.ADMIN_EMAIL, + (sender, ', '.join(to), message)) else: # now try to send the message @@ -199,7 +222,7 @@ # send the message as admin so bounces are sent there # instead of to roundup smtp = SMTPConnection(self.config) - smtp.sendmail(self.config.ADMIN_EMAIL, to, message) + smtp.sendmail(sender, to, message) except socket.error, value: raise MessageSendError("Error: couldn't send email: " "mailhost %s"%value) Modified: tracker/roundup-src/roundup/mailgw.py ============================================================================== --- tracker/roundup-src/roundup/mailgw.py (original) +++ tracker/roundup-src/roundup/mailgw.py Thu Aug 4 15:46:52 2011 @@ -27,6 +27,9 @@ and given "file" class nodes that are linked to the "msg" node. . In a multipart/alternative message or part, we look for a text/plain subpart and ignore the other parts. + . A message/rfc822 is treated similar to multipart/mixed (except for + special handling of the first text part) if unpack_rfc822 is set in + the mailgw config section. 
Summary ------- @@ -86,6 +89,7 @@ from roundup import configuration, hyperdb, date, password, rfc2822, exceptions from roundup.mailer import Mailer, MessageSendError from roundup.i18n import _ +from roundup.hyperdb import iter_roles try: import pyme, pyme.core, pyme.gpgme @@ -163,24 +167,6 @@ yield sig sig = sig.next - -def iter_roles(roles): - ''' handle the text processing of turning the roles list - into something python can use more easily - ''' - for role in [x.lower().strip() for x in roles.split(',')]: - yield role - -def user_has_role(db, userid, role_list): - ''' see if the given user has any roles that appear - in the role_list - ''' - for role in iter_roles(db.user.get(userid, 'roles')): - if role in iter_roles(role_list): - return True - return False - - def check_pgp_sigs(sig, gpgctx, author): ''' Theoretically a PGP message can have several signatures. GPGME returns status on all signatures in a linked list. Walk that @@ -261,41 +247,55 @@ parts.append(part) return parts - def getheader(self, name, default=None): - hdr = mimetools.Message.getheader(self, name, default) - if not hdr: - return '' - if hdr: - hdr = hdr.replace('\n','') # Inserted by rfc822.readheaders - # historically this method has returned utf-8 encoded string + def _decode_header_to_utf8(self, hdr): l = [] + prev_encoded = False for part, encoding in decode_header(hdr): if encoding: part = part.decode(encoding) + # RFC 2047 specifies that between encoded parts spaces are + # swallowed while at the borders from encoded to non-encoded + # or vice-versa we must preserve a space. Multiple adjacent + # non-encoded parts should not occur. + if l and prev_encoded != bool(encoding): + l.append(' ') + prev_encoded = bool(encoding) l.append(part) return ''.join([s.encode('utf-8') for s in l]) + def getheader(self, name, default=None): + hdr = mimetools.Message.getheader(self, name, default) + # TODO are there any other False values possible? 
+ # TODO if not hdr: return hdr + if hdr is None: + return None + if not hdr: + return '' + if hdr: + hdr = hdr.replace('\n','') # Inserted by rfc822.readheaders + return self._decode_header_to_utf8(hdr) + def getaddrlist(self, name): # overload to decode the name part of the address l = [] for (name, addr) in mimetools.Message.getaddrlist(self, name): - p = [] - for part, encoding in decode_header(name): - if encoding: - part = part.decode(encoding) - p.append(part) - name = ''.join([s.encode('utf-8') for s in p]) + name = self._decode_header_to_utf8(name) l.append((name, addr)) return l def getname(self): """Find an appropriate name for this message.""" + name = None if self.gettype() == 'message/rfc822': # handle message/rfc822 specially - the name should be # the subject of the actual e-mail embedded here + # we add a '.eml' extension like other email software does it self.fp.seek(0) - name = Message(self.fp).getheader('subject') - else: + s = cStringIO.StringIO(self.getbody()) + name = Message(s).getheader('subject') + if name: + name = name + '.eml' + if not name: # try name on Content-Type name = self.getparam('name') if not name: @@ -368,8 +368,11 @@ # flagging. # multipart/form-data: # For web forms only. + # message/rfc822: + # Only if configured in [mailgw] unpack_rfc822 - def extract_content(self, parent_type=None, ignore_alternatives = False): + def extract_content(self, parent_type=None, ignore_alternatives=False, + unpack_rfc822=False): """Extract the body and the attachments recursively. If the content is hidden inside a multipart/alternative part, @@ -387,7 +390,7 @@ ig = ignore_alternatives and not content_found for part in self.getparts(): new_content, new_attach = part.extract_content(content_type, - not content and ig) + not content and ig, unpack_rfc822) # If we haven't found a text/plain part yet, take this one, # otherwise make it an attachment. 
@@ -412,6 +415,13 @@ attachments.extend(new_attach) if ig and content_type == 'multipart/alternative' and content: attachments = [] + elif unpack_rfc822 and content_type == 'message/rfc822': + s = cStringIO.StringIO(self.getbody()) + m = Message(s) + ig = ignore_alternatives and not content + new_content, attachments = m.extract_content(m.gettype(), ig, + unpack_rfc822) + attachments.insert(0, m.text_as_attachment()) elif (parent_type == 'multipart/signed' and content_type == 'application/pgp-signature'): # ignore it so it won't be saved as an attachment @@ -522,877 +532,1079 @@ result = context.op_verify_result() check_pgp_sigs(result.signatures, context, author) -class MailGW: +class parsedMessage: - def __init__(self, instance, arguments=()): - self.instance = instance - self.arguments = arguments - self.default_class = None - for option, value in self.arguments: - if option == '-c': - self.default_class = value.strip() + def __init__(self, mailgw, message): + self.mailgw = mailgw + self.config = mailgw.instance.config + self.db = mailgw.db + self.message = message + self.subject = message.getheader('subject', '') + self.has_prefix = False + self.matches = dict.fromkeys(['refwd', 'quote', 'classname', + 'nodeid', 'title', 'args', 'argswhole']) + self.from_list = message.getaddrlist('resent-from') \ + or message.getaddrlist('from') + self.pfxmode = self.config['MAILGW_SUBJECT_PREFIX_PARSING'] + self.sfxmode = self.config['MAILGW_SUBJECT_SUFFIX_PARSING'] + # these are filled in by subsequent parsing steps + self.classname = None + self.properties = None + self.cl = None + self.nodeid = None + self.author = None + self.recipients = None + self.msg_props = {} + self.props = None + self.content = None + self.attachments = None + + def handle_ignore(self): + ''' Check to see if message can be safely ignored: + detect loops and + Precedence: Bulk, or Microsoft Outlook autoreplies + ''' + if self.message.getheader('x-roundup-loop', ''): + raise IgnoreLoop + if 
(self.message.getheader('precedence', '') == 'bulk' + or self.subject.lower().find("autoreply") > 0): + raise IgnoreBulk - self.mailer = Mailer(instance.config) - self.logger = logging.getLogger('mailgw') + def handle_help(self): + ''' Check to see if the message contains a usage/help request + ''' + if self.subject.strip().lower() == 'help': + raise MailUsageHelp - # should we trap exceptions (normal usage) or pass them through - # (for testing) - self.trapExceptions = 1 + def check_subject(self): + ''' Check to see if the message contains a valid subject line + ''' + if not self.subject: + raise MailUsageError, _(""" +Emails to Roundup trackers must include a Subject: line! +""") - def do_pipe(self): - """ Read a message from standard input and pass it to the mail handler. + def parse_subject(self): + ''' Matches subjects like: + Re: "[issue1234] title of issue [status=resolved]" + + Each part of the subject is matched, stored, then removed from the + start of the subject string as needed. The stored values are then + returned + ''' - Read into an internal structure that we can seek on (in case - there's an error). + tmpsubject = self.subject - XXX: we may want to read this into a temporary file instead... - """ - s = cStringIO.StringIO() - s.write(sys.stdin.read()) - s.seek(0) - self.main(s) - return 0 + sd_open, sd_close = self.config['MAILGW_SUBJECT_SUFFIX_DELIMITERS'] + delim_open = re.escape(sd_open) + if delim_open in '[(': delim_open = '\\' + delim_open + delim_close = re.escape(sd_close) + if delim_close in '[(': delim_close = '\\' + delim_close - def do_mailbox(self, filename): - """ Read a series of messages from the specified unix mailbox file and - pass each to the mail handler. - """ - # open the spool file and lock it - import fcntl - # FCNTL is deprecated in py2.3 and fcntl takes over all the symbols - if hasattr(fcntl, 'LOCK_EX'): - FCNTL = fcntl + # Look for Re: et. al. 
Used later on for MAILGW_SUBJECT_CONTENT_MATCH + re_re = r"(?P%s)\s*" % self.config["MAILGW_REFWD_RE"].pattern + m = re.match(re_re, tmpsubject, re.IGNORECASE|re.VERBOSE|re.UNICODE) + if m: + m = m.groupdict() + if m['refwd']: + self.matches.update(m) + tmpsubject = tmpsubject[len(m['refwd']):] # Consume Re: + + # Look for Leading " + m = re.match(r'(?P\s*")', tmpsubject, + re.IGNORECASE) + if m: + self.matches.update(m.groupdict()) + tmpsubject = tmpsubject[len(self.matches['quote']):] # Consume quote + + # Check if the subject includes a prefix + self.has_prefix = re.search(r'^%s(\w+)%s'%(delim_open, + delim_close), tmpsubject.strip()) + + # Match the classname if specified + class_re = r'%s(?P(%s))(?P\d+)?%s'%(delim_open, + "|".join(self.db.getclasses()), delim_close) + # Note: re.search, not re.match as there might be garbage + # (mailing list prefix, etc.) before the class identifier + m = re.search(class_re, tmpsubject, re.IGNORECASE) + if m: + self.matches.update(m.groupdict()) + # Skip to the end of the class identifier, including any + # garbage before it. + + tmpsubject = tmpsubject[m.end():] + + # Match the title of the subject + # if we've not found a valid classname prefix then force the + # scanning to handle there being a leading delimiter + title_re = r'(?P%s[^%s]*)'%( + not self.matches['classname'] and '.' 
or '', delim_open) + m = re.match(title_re, tmpsubject.strip(), re.IGNORECASE) + if m: + self.matches.update(m.groupdict()) + tmpsubject = tmpsubject[len(self.matches['title']):] # Consume title + + if self.matches['title']: + self.matches['title'] = self.matches['title'].strip() else: - import FCNTL - f = open(filename, 'r+') - fcntl.flock(f.fileno(), FCNTL.LOCK_EX) + self.matches['title'] = '' - # handle and clear the mailbox - try: - from mailbox import UnixMailbox - mailbox = UnixMailbox(f, factory=Message) - # grab one message - message = mailbox.next() - while message: - # handle this message - self.handle_Message(message) - message = mailbox.next() - # nuke the file contents - os.ftruncate(f.fileno(), 0) - except: - import traceback - traceback.print_exc() - return 1 - fcntl.flock(f.fileno(), FCNTL.LOCK_UN) + # strip off the quotes that dumb emailers put around the subject, like + # Re: "[issue1] bla blah" + if self.matches['quote'] and self.matches['title'].endswith('"'): + self.matches['title'] = self.matches['title'][:-1] + + # Match any arguments specified + args_re = r'(?P<argswhole>%s(?P<args>.+?)%s)?'%(delim_open, + delim_close) + m = re.search(args_re, tmpsubject.strip(), re.IGNORECASE|re.VERBOSE) + if m: + self.matches.update(m.groupdict()) + + def rego_confirm(self): + ''' Check for registration OTK and confirm the registration if found + ''' + + if self.config['EMAIL_REGISTRATION_CONFIRMATION']: + otk_re = re.compile('-- key (?P<otk>[a-zA-Z0-9]{32})') + otk = otk_re.search(self.matches['title'] or '') + if otk: + self.db.confirm_registration(otk.group('otk')) + subject = 'Your registration to %s is complete' % \ + self.config['TRACKER_NAME'] + sendto = [self.from_list[0][1]] + self.mailgw.mailer.standard_message(sendto, subject, '') + return 1 return 0 - def do_imap(self, server, user='', password='', mailbox='', ssl=0): - ''' Do an IMAP connection + def get_classname(self): + ''' Determine the classname of the node being created/edited ''' - 
import getpass, imaplib, socket - try: - if not user: - user = raw_input('User: ') - if not password: - password = getpass.getpass() - except (KeyboardInterrupt, EOFError): - # Ctrl C or D maybe also Ctrl Z under Windows. - print "\nAborted by user." - return 1 - # open a connection to the server and retrieve all messages - try: - if ssl: - self.logger.debug('Trying server %r with ssl'%server) - server = imaplib.IMAP4_SSL(server) - else: - self.logger.debug('Trying server %r without ssl'%server) - server = imaplib.IMAP4(server) - except (imaplib.IMAP4.error, socket.error, socket.sslerror): - self.logger.exception('IMAP server error') - return 1 + subject = self.subject - try: - server.login(user, password) - except imaplib.IMAP4.error, e: - self.logger.exception('IMAP login failure') - return 1 + # get the classname + if self.pfxmode == 'none': + classname = None + else: + classname = self.matches['classname'] - try: - if not mailbox: - (typ, data) = server.select() - else: - (typ, data) = server.select(mailbox=mailbox) - if typ != 'OK': - self.logger.error('Failed to get mailbox %r: %s'%(mailbox, - data)) - return 1 - try: - numMessages = int(data[0]) - except ValueError, value: - self.logger.error('Invalid message count from mailbox %r'% - data[0]) - return 1 - for i in range(1, numMessages+1): - (typ, data) = server.fetch(str(i), '(RFC822)') + if not classname and self.has_prefix and self.pfxmode == 'strict': + raise MailUsageError, _(""" +The message you sent to roundup did not contain a properly formed subject +line. The subject must contain a class name or designator to indicate the +'topic' of the message. For example: + Subject: [issue] This is a new issue + - this will create a new issue in the tracker with the title 'This is + a new issue'. + Subject: [issue1234] This is a followup to issue 1234 + - this will append the message's contents to the existing issue 1234 + in the tracker. - # mark the message as deleted. 
- server.store(str(i), '+FLAGS', r'(\Deleted)') +Subject was: '%(subject)s' +""") % locals() - # process the message - s = cStringIO.StringIO(data[0][1]) - s.seek(0) - self.handle_Message(Message(s)) - server.close() - finally: + # try to get the class specified - if "loose" or "none" then fall + # back on the default + attempts = [] + if classname: + attempts.append(classname) + + if self.mailgw.default_class: + attempts.append(self.mailgw.default_class) + else: + attempts.append(self.config['MAILGW_DEFAULT_CLASS']) + + # first valid class name wins + self.cl = None + for trycl in attempts: try: - server.expunge() - except: + self.cl = self.db.getclass(trycl) + classname = self.classname = trycl + break + except KeyError: pass - server.logout() - - return 0 + if not self.cl: + validname = ', '.join(self.db.getclasses()) + if classname: + raise MailUsageError, _(""" +The class name you identified in the subject line ("%(classname)s") does +not exist in the database. - def do_apop(self, server, user='', password='', ssl=False): - ''' Do authentication POP - ''' - self._do_pop(server, user, password, True, ssl) +Valid class names are: %(validname)s +Subject was: "%(subject)s" +""") % locals() + else: + raise MailUsageError, _(""" +You did not identify a class name in the subject line and there is no +default set for this tracker. The subject must contain a class name or +designator to indicate the 'topic' of the message. For example: + Subject: [issue] This is a new issue + - this will create a new issue in the tracker with the title 'This is + a new issue'. + Subject: [issue1234] This is a followup to issue 1234 + - this will append the message's contents to the existing issue 1234 + in the tracker. 
- def do_pop(self, server, user='', password='', ssl=False): - ''' Do plain POP - ''' - self._do_pop(server, user, password, False, ssl) +Subject was: '%(subject)s' +""") % locals() + # get the class properties + self.properties = self.cl.getprops() + - def _do_pop(self, server, user, password, apop, ssl): - '''Read a series of messages from the specified POP server. + def get_nodeid(self): + ''' Determine the nodeid from the message and return it if found ''' - import getpass, poplib, socket - try: - if not user: - user = raw_input('User: ') - if not password: - password = getpass.getpass() - except (KeyboardInterrupt, EOFError): - # Ctrl C or D maybe also Ctrl Z under Windows. - print "\nAborted by user." - return 1 + title = self.matches['title'] + subject = self.subject + + if self.pfxmode == 'none': + nodeid = None + else: + nodeid = self.matches['nodeid'] - # open a connection to the server and retrieve all messages - try: - if ssl: - klass = poplib.POP3_SSL - else: - klass = poplib.POP3 - server = klass(server) - except socket.error: - self.logger.exception('POP server error') - return 1 - if apop: - server.apop(user, password) - else: - server.user(user) - server.pass_(password) - numMessages = len(server.list()[1]) - for i in range(1, numMessages+1): - # retr: returns - # [ pop response e.g. '+OK 459 octets', - # [ array of message lines ], - # number of octets ] - lines = server.retr(i)[1] - s = cStringIO.StringIO('\n'.join(lines)) - s.seek(0) - self.handle_Message(Message(s)) - # delete the message - server.dele(i) + # try in-reply-to to match the message if there's no nodeid + inreplyto = self.message.getheader('in-reply-to') or '' + if nodeid is None and inreplyto: + l = self.db.getclass('msg').stringFind(messageid=inreplyto) + if l: + nodeid = self.cl.filter(None, {'messages':l})[0] - # quit the server to commit changes. - server.quit() - return 0 - def main(self, fp): - ''' fp - the file from which to read the Message. 
- ''' - return self.handle_Message(Message(fp)) + # but we do need either a title or a nodeid... + if nodeid is None and not title: + raise MailUsageError, _(""" +I cannot match your message to a node in the database - you need to either +supply a full designator (with number, eg "[issue123]") or keep the +previous subject title intact so I can match that. - def handle_Message(self, message): - """Handle an RFC822 Message +Subject was: "%(subject)s" +""") % locals() - Handle the Message object by calling handle_message() and then cope - with any errors raised by handle_message. - This method's job is to make that call and handle any - errors in a sane manner. It should be replaced if you wish to - handle errors in a different manner. - """ - # in some rare cases, a particularly stuffed-up e-mail will make - # its way into here... try to handle it gracefully + # If there's no nodeid, check to see if this is a followup and + # maybe someone's responded to the initial mail that created an + # entry. Try to find the matching nodes with the same title, and + # use the _last_ one matched (since that'll _usually_ be the most + # recent...). The subject_content_match config may specify an + # additional restriction based on the matched node's creation or + # activity. + tmatch_mode = self.config['MAILGW_SUBJECT_CONTENT_MATCH'] + if tmatch_mode != 'never' and nodeid is None and self.matches['refwd']: + l = self.cl.stringFind(title=title) + limit = None + if (tmatch_mode.startswith('creation') or + tmatch_mode.startswith('activity')): + limit, interval = tmatch_mode.split(' ', 1) + threshold = date.Date('.') - date.Interval(interval) + for id in l: + if limit: + if threshold < self.cl.get(id, limit): + nodeid = id + else: + nodeid = id - sendto = message.getaddrlist('resent-from') - if not sendto: - sendto = message.getaddrlist('from') - if not sendto: - # very bad-looking message - we don't even know who sent it - msg = ['Badly formed message from mail gateway. 
Headers:'] - msg.extend(message.headers) - msg = '\n'.join(map(str, msg)) - self.logger.error(msg) - return + # if a nodeid was specified, make sure it's valid + if nodeid is not None and not self.cl.hasnode(nodeid): + if self.pfxmode == 'strict': + raise MailUsageError, _(""" +The node specified by the designator in the subject of your message +("%(nodeid)s") does not exist. - msg = 'Handling message' - if message.getheader('message-id'): - msg += ' (Message-id=%r)'%message.getheader('message-id') - self.logger.info(msg) +Subject was: "%(subject)s" +""") % locals() + else: + nodeid = None + self.nodeid = nodeid - # try normal message-handling - if not self.trapExceptions: - return self.handle_message(message) + def get_author_id(self): + ''' Attempt to get the author id from the existing registered users, + otherwise attempt to register a new user and return their id + ''' + # Don't create users if anonymous isn't allowed to register + create = 1 + anonid = self.db.user.lookup('anonymous') + if not (self.db.security.hasPermission('Register', anonid, 'user') + and self.db.security.hasPermission('Email Access', anonid)): + create = 0 - # no, we want to trap exceptions - try: - return self.handle_message(message) - except MailUsageHelp: - # bounce the message back to the sender with the usage message - fulldoc = '\n'.join(string.split(__doc__, '\n')[2:]) - m = [''] - m.append('\n\nMail Gateway Help\n=================') - m.append(fulldoc) - self.mailer.bounce_message(message, [sendto[0][1]], m, - subject="Mail Gateway Help") - except MailUsageError, value: - # bounce the message back to the sender with the usage message - fulldoc = '\n'.join(string.split(__doc__, '\n')[2:]) - m = [''] - m.append(str(value)) - m.append('\n\nMail Gateway Help\n=================') - m.append(fulldoc) - self.mailer.bounce_message(message, [sendto[0][1]], m) - except Unauthorized, value: - # just inform the user that he is not authorized - m = [''] - m.append(str(value)) - 
self.mailer.bounce_message(message, [sendto[0][1]], m) - except IgnoreMessage: - # do not take any action - # this exception is thrown when email should be ignored - msg = 'IgnoreMessage raised' - if message.getheader('message-id'): - msg += ' (Message-id=%r)'%message.getheader('message-id') - self.logger.info(msg) - return - except: - msg = 'Exception handling message' - if message.getheader('message-id'): - msg += ' (Message-id=%r)'%message.getheader('message-id') - self.logger.exception(msg) + # ok, now figure out who the author is - create a new user if the + # "create" flag is true + author = uidFromAddress(self.db, self.from_list[0], create=create) - # bounce the message back to the sender with the error message - # let the admin know that something very bad is happening - m = [''] - m.append('An unexpected error occurred during the processing') - m.append('of your message. The tracker administrator is being') - m.append('notified.\n') - self.mailer.bounce_message(message, [sendto[0][1]], m) + # if we're not recognised, and we don't get added as a user, then we + # must be anonymous + if not author: + author = anonid - m.append('----------------') - m.append(traceback.format_exc()) - self.mailer.bounce_message(message, [self.instance.config.ADMIN_EMAIL], m) + # make sure the author has permission to use the email interface + if not self.db.security.hasPermission('Email Access', author): + if author == anonid: + # we're anonymous and we need to be a registered user + from_address = self.from_list[0][1] + registration_info = "" + if self.db.security.hasPermission('Web Access', author) and \ + self.db.security.hasPermission('Register', anonid, 'user'): + tracker_web = self.config.TRACKER_WEB + registration_info = """ Please register at: - def handle_message(self, message): - ''' message - a Message instance +%(tracker_web)suser?template=register - Parse the message as per the module docstring. 
- ''' - # get database handle for handling one email - self.db = self.instance.open ('admin') - try: - return self._handle_message (message) - finally: - self.db.close() +...before sending mail to the tracker.""" % locals() - def _handle_message(self, message): - ''' message - a Message instance + raise Unauthorized, _(""" +You are not a registered user.%(registration_info)s - Parse the message as per the module docstring. +Unknown address: %(from_address)s +""") % locals() + else: + # we're registered and we're _still_ not allowed access + raise Unauthorized, _( + 'You are not permitted to access this tracker.') + self.author = author - The implementation expects an opened database and a try/finally - that closes the database. + def check_node_permissions(self): + ''' Check if the author has permission to edit or create this + class of node ''' - # detect loops - if message.getheader('x-roundup-loop', ''): - raise IgnoreLoop - - # handle the subject line - subject = message.getheader('subject', '') - if not subject: - raise MailUsageError, _(""" -Emails to Roundup trackers must include a Subject: line! -""") + if self.nodeid: + if not self.db.security.hasPermission('Edit', self.author, + self.classname, itemid=self.nodeid): + raise Unauthorized, _( + 'You are not permitted to edit %(classname)s.' + ) % self.__dict__ + else: + if not self.db.security.hasPermission('Create', self.author, + self.classname): + raise Unauthorized, _( + 'You are not permitted to create %(classname)s.' 
+ ) % self.__dict__ - # detect Precedence: Bulk, or Microsoft Outlook autoreplies - if (message.getheader('precedence', '') == 'bulk' - or subject.lower().find("autoreply") > 0): - raise IgnoreBulk + def commit_and_reopen_as_author(self): + ''' the author may have been created - make sure the change is + committed before we reopen the database + then re-open the database as the author + ''' + self.db.commit() - if subject.strip().lower() == 'help': - raise MailUsageHelp + # set the database user as the author + username = self.db.user.get(self.author, 'username') + self.db.setCurrentUser(username) - # config is used many times in this method. - # make local variable for easier access - config = self.instance.config - - # determine the sender's address - from_list = message.getaddrlist('resent-from') - if not from_list: - from_list = message.getaddrlist('from') + # re-get the class with the new database connection + self.cl = self.db.getclass(self.classname) - # XXX Don't enable. This doesn't work yet. -# "[^A-z.]tracker\+(?P<classname>[^\d\s]+)(?P<nodeid>\d+)\@some.dom.ain[^A-z.]" - # handle delivery to addresses like:tracker+issue25 at some.dom.ain - # use the embedded issue number as our issue -# issue_re = config['MAILGW_ISSUE_ADDRESS_RE'] -# if issue_re: -# for header in ['to', 'cc', 'bcc']: -# addresses = message.getheader(header, '') -# if addresses: -# # FIXME, this only finds the first match in the addresses. 
-# issue = re.search(issue_re, addresses, 'i') -# if issue: -# classname = issue.group('classname') -# nodeid = issue.group('nodeid') -# break - - # Matches subjects like: - # Re: "[issue1234] title of issue [status=resolved]" - - # Alias since we need a reference to the original subject for - # later use in error messages - tmpsubject = subject + def get_recipients(self): + ''' Get the list of recipients who were included in message and + register them as users if possible + ''' + # Don't create users if anonymous isn't allowed to register + create = 1 + anonid = self.db.user.lookup('anonymous') + if not (self.db.security.hasPermission('Register', anonid, 'user') + and self.db.security.hasPermission('Email Access', anonid)): + create = 0 - sd_open, sd_close = config['MAILGW_SUBJECT_SUFFIX_DELIMITERS'] - delim_open = re.escape(sd_open) - if delim_open in '[(': delim_open = '\\' + delim_open - delim_close = re.escape(sd_close) - if delim_close in '[(': delim_close = '\\' + delim_close + # get the user class arguments from the commandline + user_props = self.mailgw.get_class_arguments('user') - matches = dict.fromkeys(['refwd', 'quote', 'classname', - 'nodeid', 'title', 'args', - 'argswhole']) + # now update the recipients list + recipients = [] + tracker_email = self.config['TRACKER_EMAIL'].lower() + msg_to = self.message.getaddrlist('to') + msg_cc = self.message.getaddrlist('cc') + for recipient in msg_to + msg_cc: + r = recipient[1].strip().lower() + if r == tracker_email or not r: + continue - # Look for Re: et. al. 
Used later on for MAILGW_SUBJECT_CONTENT_MATCH - re_re = r"(?P<refwd>%s)\s*" % config["MAILGW_REFWD_RE"].pattern - m = re.match(re_re, tmpsubject, re.IGNORECASE|re.VERBOSE|re.UNICODE) - if m: - m = m.groupdict() - if m['refwd']: - matches.update(m) - tmpsubject = tmpsubject[len(m['refwd']):] # Consume Re: + # look up the recipient - create if necessary (and we're + # allowed to) + recipient = uidFromAddress(self.db, recipient, create, **user_props) - # Look for Leading " - m = re.match(r'(?P<quote>\s*")', tmpsubject, - re.IGNORECASE) - if m: - matches.update(m.groupdict()) - tmpsubject = tmpsubject[len(matches['quote']):] # Consume quote + # if all's well, add the recipient to the list + if recipient: + recipients.append(recipient) + self.recipients = recipients - has_prefix = re.search(r'^%s(\w+)%s'%(delim_open, - delim_close), tmpsubject.strip()) + def get_props(self): + ''' Generate all the props for the new/updated node and return them + ''' + subject = self.subject + + # get the commandline arguments for issues + issue_props = self.mailgw.get_class_arguments('issue', self.classname) + + # + # handle the subject argument list + # + # figure what the properties of this Class are + props = {} + args = self.matches['args'] + argswhole = self.matches['argswhole'] + title = self.matches['title'] + + # Reform the title + if self.matches['nodeid'] and self.nodeid is None: + title = subject + + if args: + if self.sfxmode == 'none': + title += ' ' + argswhole + else: + errors, props = setPropArrayFromString(self, self.cl, args, + self.nodeid) + # handle any errors parsing the argument list + if errors: + if self.sfxmode == 'strict': + errors = '\n- '.join(map(str, errors)) + raise MailUsageError, _(""" +There were problems handling your subject line argument list: +- %(errors)s - class_re = r'%s(?P<classname>(%s))(?P<nodeid>\d+)?%s'%(delim_open, - "|".join(self.db.getclasses()), delim_close) - # Note: re.search, not re.match as there might be garbage - # (mailing list 
prefix, etc.) before the class identifier - m = re.search(class_re, tmpsubject, re.IGNORECASE) - if m: - matches.update(m.groupdict()) - # Skip to the end of the class identifier, including any - # garbage before it. +Subject was: "%(subject)s" +""") % locals() + else: + title += ' ' + argswhole - tmpsubject = tmpsubject[m.end():] - # if we've not found a valid classname prefix then force the - # scanning to handle there being a leading delimiter - title_re = r'(?P<title>%s[^%s]+)'%( - not matches['classname'] and '.' or '', delim_open) - m = re.match(title_re, tmpsubject.strip(), re.IGNORECASE) - if m: - matches.update(m.groupdict()) - tmpsubject = tmpsubject[len(matches['title']):] # Consume title + # set the issue title to the subject + title = title.strip() + if (title and self.properties.has_key('title') and not + issue_props.has_key('title')): + issue_props['title'] = title + if (self.nodeid and self.properties.has_key('title') and not + self.config['MAILGW_SUBJECT_UPDATES_TITLE']): + issue_props['title'] = self.cl.get(self.nodeid,'title') + + # merge the command line props defined in issue_props into + # the props dictionary because function(**props, **issue_props) + # is a syntax error. 
+ for prop in issue_props.keys() : + if not props.has_key(prop) : + props[prop] = issue_props[prop] + + self.props = props + + def get_pgp_message(self): + ''' If they've enabled PGP processing then verify the signature + or decrypt the message + ''' + def pgp_role(): + """ if PGP_ROLES is specified the user must have a Role in the list + or we will skip PGP processing + """ + if self.config.PGP_ROLES: + return self.db.user.has_role(self.author, + iter_roles(self.config.PGP_ROLES)) + else: + return True - args_re = r'(?P<argswhole>%s(?P<args>.+?)%s)?'%(delim_open, - delim_close) - m = re.search(args_re, tmpsubject.strip(), re.IGNORECASE|re.VERBOSE) - if m: - matches.update(m.groupdict()) + if self.config.PGP_ENABLE and pgp_role(): + assert pyme, 'pyme is not installed' + # signed/encrypted mail must come from the primary address + author_address = self.db.user.get(self.author, 'address') + if self.config.PGP_HOMEDIR: + os.environ['GNUPGHOME'] = self.config.PGP_HOMEDIR + if self.message.pgp_signed(): + self.message.verify_signature(author_address) + elif self.message.pgp_encrypted(): + # replace message with the contents of the decrypted + # message for content extraction + # TODO: encrypted message handling is far from perfect + # bounces probably include the decrypted message, for + # instance :( + self.message = self.message.decrypt(author_address) + else: + raise MailUsageError, _(""" +This tracker has been configured to require all email be PGP signed or +encrypted.""") - # figure subject line parsing modes - pfxmode = config['MAILGW_SUBJECT_PREFIX_PARSING'] - sfxmode = config['MAILGW_SUBJECT_SUFFIX_PARSING'] + def get_content_and_attachments(self): + ''' get the attachments and first text part from the message + ''' + ig = self.config.MAILGW_IGNORE_ALTERNATIVES + self.content, self.attachments = self.message.extract_content( + ignore_alternatives=ig, + unpack_rfc822=self.config.MAILGW_UNPACK_RFC822) + - # check for registration OTK - # or fallback on the 
default class - if self.db.config['EMAIL_REGISTRATION_CONFIRMATION']: - otk_re = re.compile('-- key (?P<otk>[a-zA-Z0-9]{32})') - otk = otk_re.search(matches['title'] or '') - if otk: - self.db.confirm_registration(otk.group('otk')) - subject = 'Your registration to %s is complete' % \ - config['TRACKER_NAME'] - sendto = [from_list[0][1]] - self.mailer.standard_message(sendto, subject, '') - return + def create_files(self): + ''' Create a file for each attachment in the message + ''' + if not self.properties.has_key('files'): + return + files = [] + file_props = self.mailgw.get_class_arguments('file') + + if self.attachments: + for (name, mime_type, data) in self.attachments: + if not self.db.security.hasPermission('Create', self.author, + 'file'): + raise Unauthorized, _( + 'You are not permitted to create files.') + if not name: + name = "unnamed" + try: + fileid = self.db.file.create(type=mime_type, name=name, + content=data, **file_props) + except exceptions.Reject: + pass + else: + files.append(fileid) + # allowed to attach the files to an existing node? + if self.nodeid and not self.db.security.hasPermission('Edit', + self.author, self.classname, 'files'): + raise Unauthorized, _( + 'You are not permitted to add files to %(classname)s.' + ) % self.__dict__ - # get the classname - if pfxmode == 'none': - classname = None - else: - classname = matches['classname'] + self.msg_props['files'] = files + if self.nodeid: + # extend the existing files list + fileprop = self.cl.get(self.nodeid, 'files') + fileprop.extend(files) + files = fileprop - if not classname and has_prefix and pfxmode == 'strict': - raise MailUsageError, _(""" -The message you sent to roundup did not contain a properly formed subject -line. The subject must contain a class name or designator to indicate the -'topic' of the message. For example: - Subject: [issue] This is a new issue - - this will create a new issue in the tracker with the title 'This is - a new issue'. 
- Subject: [issue1234] This is a followup to issue 1234 - - this will append the message's contents to the existing issue 1234 - in the tracker. + self.props['files'] = files -Subject was: '%(subject)s' -""") % locals() + def create_msg(self): + ''' Create msg containing all the relevant information from the message + ''' + if not self.properties.has_key('messages'): + return + msg_props = self.mailgw.get_class_arguments('msg') + self.msg_props.update (msg_props) + + # Get the message ids + inreplyto = self.message.getheader('in-reply-to') or '' + messageid = self.message.getheader('message-id') + # generate a messageid if there isn't one + if not messageid: + messageid = "<%s.%s.%s%s@%s>"%(time.time(), random.random(), + self.classname, self.nodeid, self.config['MAIL_DOMAIN']) + + if self.content is None: + raise MailUsageError, _(""" +Roundup requires the submission to be plain text. The message parser could +not find a text/plain part to use. +""") - # try to get the class specified - if "loose" or "none" then fall - # back on the default - attempts = [] - if classname: - attempts.append(classname) + # parse the body of the message, stripping out bits as appropriate + summary, content = parseContent(self.content, config=self.config) + content = content.strip() - if self.default_class: - attempts.append(self.default_class) - else: - attempts.append(config['MAILGW_DEFAULT_CLASS']) + if content: + if not self.db.security.hasPermission('Create', self.author, 'msg'): + raise Unauthorized, _( + 'You are not permitted to create messages.') - # first valid class name wins - cl = None - for trycl in attempts: try: - cl = self.db.getclass(trycl) - classname = trycl - break - except KeyError: - pass - - if not cl: - validname = ', '.join(self.db.getclasses()) - if classname: + message_id = self.db.msg.create(author=self.author, + recipients=self.recipients, date=date.Date('.'), + summary=summary, content=content, + messageid=messageid, inreplyto=inreplyto, 
**self.msg_props) + except exceptions.Reject, error: raise MailUsageError, _(""" -The class name you identified in the subject line ("%(classname)s") does -not exist in the database. - -Valid class names are: %(validname)s -Subject was: "%(subject)s" +Mail message was rejected by a detector. +%(error)s """) % locals() + # allowed to attach the message to the existing node? + if self.nodeid and not self.db.security.hasPermission('Edit', + self.author, self.classname, 'messages'): + raise Unauthorized, _( + 'You are not permitted to add messages to %(classname)s.' + ) % self.__dict__ + + if self.nodeid: + # add the message to the node's list + messages = self.cl.get(self.nodeid, 'messages') + messages.append(message_id) + self.props['messages'] = messages else: - raise MailUsageError, _(""" -You did not identify a class name in the subject line and there is no -default set for this tracker. The subject must contain a class name or -designator to indicate the 'topic' of the message. For example: - Subject: [issue] This is a new issue - - this will create a new issue in the tracker with the title 'This is - a new issue'. - Subject: [issue1234] This is a followup to issue 1234 - - this will append the message's contents to the existing issue 1234 - in the tracker. + # pre-load the messages list + self.props['messages'] = [message_id] -Subject was: '%(subject)s' + def create_node(self): + ''' Create/update a node using self.props + ''' + classname = self.classname + try: + if self.nodeid: + # Check permissions for each property + for prop in self.props.keys(): + if not self.db.security.hasPermission('Edit', self.author, + classname, prop): + raise Unauthorized, _('You are not permitted to edit ' + 'property %(prop)s of class %(classname)s.' 
+ ) % locals() + self.cl.set(self.nodeid, **self.props) + else: + # Check permissions for each property + for prop in self.props.keys(): + if not self.db.security.hasPermission('Create', self.author, + classname, prop): + raise Unauthorized, _('You are not permitted to set ' + 'property %(prop)s of class %(classname)s.' + ) % locals() + self.nodeid = self.cl.create(**self.props) + except (TypeError, IndexError, ValueError, exceptions.Reject), message: + raise MailUsageError, _(""" +There was a problem with the message you sent: + %(message)s """) % locals() - # get the optional nodeid - if pfxmode == 'none': - nodeid = None - else: - nodeid = matches['nodeid'] + return self.nodeid - # try in-reply-to to match the message if there's no nodeid - inreplyto = message.getheader('in-reply-to') or '' - if nodeid is None and inreplyto: - l = self.db.getclass('msg').stringFind(messageid=inreplyto) - if l: - nodeid = cl.filter(None, {'messages':l})[0] - # title is optional too - title = matches['title'] - if title: - title = title.strip() - else: - title = '' - # strip off the quotes that dumb emailers put around the subject, like - # Re: "[issue1] bla blah" - if matches['quote'] and title.endswith('"'): - title = title[:-1] +class MailGW: - # but we do need either a title or a nodeid... - if nodeid is None and not title: - raise MailUsageError, _(""" -I cannot match your message to a node in the database - you need to either -supply a full designator (with number, eg "[issue123]") or keep the -previous subject title intact so I can match that. 
+ # To override the message parsing, derive your own class from + # parsedMessage and assign to parsed_message_class in a derived + # class of MailGW + parsed_message_class = parsedMessage -Subject was: "%(subject)s" -""") % locals() + def __init__(self, instance, arguments=()): + self.instance = instance + self.arguments = arguments + self.default_class = None + for option, value in self.arguments: + if option == '-c': + self.default_class = value.strip() - # If there's no nodeid, check to see if this is a followup and - # maybe someone's responded to the initial mail that created an - # entry. Try to find the matching nodes with the same title, and - # use the _last_ one matched (since that'll _usually_ be the most - # recent...). The subject_content_match config may specify an - # additional restriction based on the matched node's creation or - # activity. - tmatch_mode = config['MAILGW_SUBJECT_CONTENT_MATCH'] - if tmatch_mode != 'never' and nodeid is None and matches['refwd']: - l = cl.stringFind(title=title) - limit = None - if (tmatch_mode.startswith('creation') or - tmatch_mode.startswith('activity')): - limit, interval = tmatch_mode.split(' ', 1) - threshold = date.Date('.') - date.Interval(interval) - for id in l: - if limit: - if threshold < cl.get(id, limit): - nodeid = id - else: - nodeid = id + self.mailer = Mailer(instance.config) + self.logger = logging.getLogger('roundup.mailgw') - # if a nodeid was specified, make sure it's valid - if nodeid is not None and not cl.hasnode(nodeid): - if pfxmode == 'strict': - raise MailUsageError, _(""" -The node specified by the designator in the subject of your message -("%(nodeid)s") does not exist. + # should we trap exceptions (normal usage) or pass them through + # (for testing) + self.trapExceptions = 1 -Subject was: "%(subject)s" -""") % locals() + def do_pipe(self): + """ Read a message from standard input and pass it to the mail handler. 
+ + Read into an internal structure that we can seek on (in case + there's an error). + + XXX: we may want to read this into a temporary file instead... + """ + s = cStringIO.StringIO() + s.write(sys.stdin.read()) + s.seek(0) + self.main(s) + return 0 + + def do_mailbox(self, filename): + """ Read a series of messages from the specified unix mailbox file and + pass each to the mail handler. + """ + # open the spool file and lock it + import fcntl + # FCNTL is deprecated in py2.3 and fcntl takes over all the symbols + if hasattr(fcntl, 'LOCK_EX'): + FCNTL = fcntl + else: + import FCNTL + f = open(filename, 'r+') + fcntl.flock(f.fileno(), FCNTL.LOCK_EX) + + # handle and clear the mailbox + try: + from mailbox import UnixMailbox + mailbox = UnixMailbox(f, factory=Message) + # grab one message + message = mailbox.next() + while message: + # handle this message + self.handle_Message(message) + message = mailbox.next() + # nuke the file contents + os.ftruncate(f.fileno(), 0) + except: + import traceback + traceback.print_exc() + return 1 + fcntl.flock(f.fileno(), FCNTL.LOCK_UN) + return 0 + + def do_imap(self, server, user='', password='', mailbox='', ssl=0, + cram=0): + ''' Do an IMAP connection + ''' + import getpass, imaplib, socket + try: + if not user: + user = raw_input('User: ') + if not password: + password = getpass.getpass() + except (KeyboardInterrupt, EOFError): + # Ctrl C or D maybe also Ctrl Z under Windows. + print "\nAborted by user." + return 1 + # open a connection to the server and retrieve all messages + try: + if ssl: + self.logger.debug('Trying server %r with ssl'%server) + server = imaplib.IMAP4_SSL(server) else: - title = subject - nodeid = None + self.logger.debug('Trying server %r without ssl'%server) + server = imaplib.IMAP4(server) + except (imaplib.IMAP4.error, socket.error, socket.sslerror): + self.logger.exception('IMAP server error') + return 1 - # Handle the arguments specified by the email gateway command line. 
- # We do this by looping over the list of self.arguments looking for - # a -C to tell us what class then the -S setting string. - msg_props = {} - user_props = {} - file_props = {} - issue_props = {} - # so, if we have any arguments, use them - if self.arguments: - current_class = 'msg' - for option, propstring in self.arguments: - if option in ( '-C', '--class'): - current_class = propstring.strip() - # XXX this is not flexible enough. - # we should chect for subclasses of these classes, - # not for the class name... - if current_class not in ('msg', 'file', 'user', 'issue'): - mailadmin = config['ADMIN_EMAIL'] - raise MailUsageError, _(""" -The mail gateway is not properly set up. Please contact -%(mailadmin)s and have them fix the incorrect class specified as: - %(current_class)s -""") % locals() - if option in ('-S', '--set'): - if current_class == 'issue' : - errors, issue_props = setPropArrayFromString(self, - cl, propstring.strip(), nodeid) - elif current_class == 'file' : - temp_cl = self.db.getclass('file') - errors, file_props = setPropArrayFromString(self, - temp_cl, propstring.strip()) - elif current_class == 'msg' : - temp_cl = self.db.getclass('msg') - errors, msg_props = setPropArrayFromString(self, - temp_cl, propstring.strip()) - elif current_class == 'user' : - temp_cl = self.db.getclass('user') - errors, user_props = setPropArrayFromString(self, - temp_cl, propstring.strip()) - if errors: - mailadmin = config['ADMIN_EMAIL'] - raise MailUsageError, _(""" -The mail gateway is not properly set up. 
Please contact -%(mailadmin)s and have them fix the incorrect properties: - %(errors)s -""") % locals() + try: + if cram: + server.login_cram_md5(user, password) + else: + server.login(user, password) + except imaplib.IMAP4.error, e: + self.logger.exception('IMAP login failure') + return 1 - # - # handle the users - # - # Don't create users if anonymous isn't allowed to register - create = 1 - anonid = self.db.user.lookup('anonymous') - if not (self.db.security.hasPermission('Create', anonid, 'user') - and self.db.security.hasPermission('Email Access', anonid)): - create = 0 + try: + if not mailbox: + (typ, data) = server.select() + else: + (typ, data) = server.select(mailbox=mailbox) + if typ != 'OK': + self.logger.error('Failed to get mailbox %r: %s'%(mailbox, + data)) + return 1 + try: + numMessages = int(data[0]) + except ValueError, value: + self.logger.error('Invalid message count from mailbox %r'% + data[0]) + return 1 + for i in range(1, numMessages+1): + (typ, data) = server.fetch(str(i), '(RFC822)') + + # mark the message as deleted. + server.store(str(i), '+FLAGS', r'(\Deleted)') + + # process the message + s = cStringIO.StringIO(data[0][1]) + s.seek(0) + self.handle_Message(Message(s)) + server.close() + finally: + try: + server.expunge() + except: + pass + server.logout() + + return 0 + + + def do_apop(self, server, user='', password='', ssl=False): + ''' Do authentication POP + ''' + self._do_pop(server, user, password, True, ssl) + + def do_pop(self, server, user='', password='', ssl=False): + ''' Do plain POP + ''' + self._do_pop(server, user, password, False, ssl) + + def _do_pop(self, server, user, password, apop, ssl): + '''Read a series of messages from the specified POP server. + ''' + import getpass, poplib, socket + try: + if not user: + user = raw_input('User: ') + if not password: + password = getpass.getpass() + except (KeyboardInterrupt, EOFError): + # Ctrl C or D maybe also Ctrl Z under Windows. + print "\nAborted by user." 
+ return 1 + + # open a connection to the server and retrieve all messages + try: + if ssl: + klass = poplib.POP3_SSL + else: + klass = poplib.POP3 + server = klass(server) + except socket.error: + self.logger.exception('POP server error') + return 1 + if apop: + server.apop(user, password) + else: + server.user(user) + server.pass_(password) + numMessages = len(server.list()[1]) + for i in range(1, numMessages+1): + # retr: returns + # [ pop response e.g. '+OK 459 octets', + # [ array of message lines ], + # number of octets ] + lines = server.retr(i)[1] + s = cStringIO.StringIO('\n'.join(lines)) + s.seek(0) + self.handle_Message(Message(s)) + # delete the message + server.dele(i) + + # quit the server to commit changes. + server.quit() + return 0 - # ok, now figure out who the author is - create a new user if the - # "create" flag is true - author = uidFromAddress(self.db, from_list[0], create=create) + def main(self, fp): + ''' fp - the file from which to read the Message. + ''' + return self.handle_Message(Message(fp)) - # if we're not recognised, and we don't get added as a user, then we - # must be anonymous - if not author: - author = anonid + def handle_Message(self, message): + """Handle an RFC822 Message - # make sure the author has permission to use the email interface - if not self.db.security.hasPermission('Email Access', author): - if author == anonid: - # we're anonymous and we need to be a registered user - from_address = from_list[0][1] - registration_info = "" - if self.db.security.hasPermission('Web Access', author) and \ - self.db.security.hasPermission('Create', anonid, 'user'): - tracker_web = self.instance.config.TRACKER_WEB - registration_info = """ Please register at: + Handle the Message object by calling handle_message() and then cope + with any errors raised by handle_message. + This method's job is to make that call and handle any + errors in a sane manner. It should be replaced if you wish to + handle errors in a different manner. 
+ """ + # in some rare cases, a particularly stuffed-up e-mail will make + # its way into here... try to handle it gracefully -%(tracker_web)suser?template=register + sendto = message.getaddrlist('resent-from') + if not sendto: + sendto = message.getaddrlist('from') + if not sendto: + # very bad-looking message - we don't even know who sent it + msg = ['Badly formed message from mail gateway. Headers:'] + msg.extend(message.headers) + msg = '\n'.join(map(str, msg)) + self.logger.error(msg) + return -...before sending mail to the tracker.""" % locals() + msg = 'Handling message' + if message.getheader('message-id'): + msg += ' (Message-id=%r)'%message.getheader('message-id') + self.logger.info(msg) - raise Unauthorized, _(""" -You are not a registered user.%(registration_info)s + # try normal message-handling + if not self.trapExceptions: + return self.handle_message(message) -Unknown address: %(from_address)s -""") % locals() - else: - # we're registered and we're _still_ not allowed access - raise Unauthorized, _( - 'You are not permitted to access this tracker.') + # no, we want to trap exceptions + try: + return self.handle_message(message) + except MailUsageHelp: + # bounce the message back to the sender with the usage message + fulldoc = '\n'.join(string.split(__doc__, '\n')[2:]) + m = [''] + m.append('\n\nMail Gateway Help\n=================') + m.append(fulldoc) + self.mailer.bounce_message(message, [sendto[0][1]], m, + subject="Mail Gateway Help") + except MailUsageError, value: + # bounce the message back to the sender with the usage message + fulldoc = '\n'.join(string.split(__doc__, '\n')[2:]) + m = [''] + m.append(str(value)) + m.append('\n\nMail Gateway Help\n=================') + m.append(fulldoc) + self.mailer.bounce_message(message, [sendto[0][1]], m) + except Unauthorized, value: + # just inform the user that he is not authorized + m = [''] + m.append(str(value)) + self.mailer.bounce_message(message, [sendto[0][1]], m) + except IgnoreMessage: + # 
do not take any action + # this exception is thrown when email should be ignored + msg = 'IgnoreMessage raised' + if message.getheader('message-id'): + msg += ' (Message-id=%r)'%message.getheader('message-id') + self.logger.info(msg) + return + except: + msg = 'Exception handling message' + if message.getheader('message-id'): + msg += ' (Message-id=%r)'%message.getheader('message-id') + self.logger.exception(msg) - # make sure they're allowed to edit or create this class of information - if nodeid: - if not self.db.security.hasPermission('Edit', author, classname, - itemid=nodeid): - raise Unauthorized, _( - 'You are not permitted to edit %(classname)s.') % locals() - else: - if not self.db.security.hasPermission('Create', author, classname): - raise Unauthorized, _( - 'You are not permitted to create %(classname)s.' - ) % locals() + # bounce the message back to the sender with the error message + # let the admin know that something very bad is happening + m = [''] + m.append('An unexpected error occurred during the processing') + m.append('of your message. The tracker administrator is being') + m.append('notified.\n') + self.mailer.bounce_message(message, [sendto[0][1]], m) - # the author may have been created - make sure the change is - # committed before we reopen the database - self.db.commit() + m.append('----------------') + m.append(traceback.format_exc()) + self.mailer.bounce_message(message, [self.instance.config.ADMIN_EMAIL], m) - # set the database user as the author - username = self.db.user.get(author, 'username') - self.db.setCurrentUser(username) + def handle_message(self, message): + ''' message - a Message instance - # re-get the class with the new database connection - cl = self.db.getclass(classname) + Parse the message as per the module docstring. 
+ ''' + # get database handle for handling one email + self.db = self.instance.open ('admin') + try: + return self._handle_message(message) + finally: + self.db.close() - # now update the recipients list - recipients = [] - tracker_email = config['TRACKER_EMAIL'].lower() - for recipient in message.getaddrlist('to') + message.getaddrlist('cc'): - r = recipient[1].strip().lower() - if r == tracker_email or not r: - continue + def _handle_message(self, message): + ''' message - a Message instance - # look up the recipient - create if necessary (and we're - # allowed to) - recipient = uidFromAddress(self.db, recipient, create, **user_props) + Parse the message as per the module docstring. + The following code expects an opened database and a try/finally + that closes the database. + ''' + parsed_message = self.parsed_message_class(self, message) - # if all's well, add the recipient to the list - if recipient: - recipients.append(recipient) + # Filter out messages to ignore + parsed_message.handle_ignore() + + # Check for usage/help requests + parsed_message.handle_help() + + # Check if the subject line is valid + parsed_message.check_subject() - # - # handle the subject argument list - # - # figure what the properties of this Class are - properties = cl.getprops() - props = {} - args = matches['args'] - argswhole = matches['argswhole'] - if args: - if sfxmode == 'none': - title += ' ' + argswhole - else: - errors, props = setPropArrayFromString(self, cl, args, nodeid) - # handle any errors parsing the argument list - if errors: - if sfxmode == 'strict': - errors = '\n- '.join(map(str, errors)) - raise MailUsageError, _(""" -There were problems handling your subject line argument list: -- %(errors)s + # XXX Don't enable. This doesn't work yet. 
+ # XXX once this works it should be moved to parsedMessage class +# "[^A-z.]tracker\+(?P<classname>[^\d\s]+)(?P<nodeid>\d+)\@some.dom.ain[^A-z.]" + # handle delivery to addresses like:tracker+issue25 at some.dom.ain + # use the embedded issue number as our issue +# issue_re = config['MAILGW_ISSUE_ADDRESS_RE'] +# if issue_re: +# for header in ['to', 'cc', 'bcc']: +# addresses = message.getheader(header, '') +# if addresses: +# # FIXME, this only finds the first match in the addresses. +# issue = re.search(issue_re, addresses, 'i') +# if issue: +# classname = issue.group('classname') +# nodeid = issue.group('nodeid') +# break -Subject was: "%(subject)s" -""") % locals() - else: - title += ' ' + argswhole + # Parse the subject line to get the importants parts + parsed_message.parse_subject() + # check for registration OTK + if parsed_message.rego_confirm(): + return - # set the issue title to the subject - title = title.strip() - if (title and properties.has_key('title') and not - issue_props.has_key('title')): - issue_props['title'] = title + # get the classname + parsed_message.get_classname() - # - # handle message-id and in-reply-to - # - messageid = message.getheader('message-id') - # generate a messageid if there isn't one - if not messageid: - messageid = "<%s.%s.%s%s@%s>"%(time.time(), random.random(), - classname, nodeid, config['MAIL_DOMAIN']) + # get the optional nodeid + parsed_message.get_nodeid() - # if they've enabled PGP processing then verify the signature - # or decrypt the message + # Determine who the author is + parsed_message.get_author_id() + + # make sure they're allowed to edit or create this class + parsed_message.check_node_permissions() - # if PGP_ROLES is specified the user must have a Role in the list - # or we will skip PGP processing - def pgp_role(): - if self.instance.config.PGP_ROLES: - return user_has_role(self.db, author, - self.instance.config.PGP_ROLES) - else: - return True + # author may have been created: + # commit author to 
database and re-open as author + parsed_message.commit_and_reopen_as_author() - if self.instance.config.PGP_ENABLE and pgp_role(): - assert pyme, 'pyme is not installed' - # signed/encrypted mail must come from the primary address - author_address = self.db.user.get(author, 'address') - if self.instance.config.PGP_HOMEDIR: - os.environ['GNUPGHOME'] = self.instance.config.PGP_HOMEDIR - if message.pgp_signed(): - message.verify_signature(author_address) - elif message.pgp_encrypted(): - # replace message with the contents of the decrypted - # message for content extraction - # TODO: encrypted message handling is far from perfect - # bounces probably include the decrypted message, for - # instance :( - message = message.decrypt(author_address) - else: - raise MailUsageError, _(""" -This tracker has been configured to require all email be PGP signed or -encrypted.""") - # now handle the body - find the message - ig = self.instance.config.MAILGW_IGNORE_ALTERNATIVES - content, attachments = message.extract_content(ignore_alternatives = ig) - if content is None: - raise MailUsageError, _(""" -Roundup requires the submission to be plain text. The message parser could -not find a text/plain part to use. 
-""") + # Get the recipients list + parsed_message.get_recipients() - # parse the body of the message, stripping out bits as appropriate - summary, content = parseContent(content, config=config) - content = content.strip() + # get the new/updated node props + parsed_message.get_props() - # - # handle the attachments - # - if properties.has_key('files'): - files = [] - for (name, mime_type, data) in attachments: - if not self.db.security.hasPermission('Create', author, 'file'): - raise Unauthorized, _( - 'You are not permitted to create files.') - if not name: - name = "unnamed" - try: - fileid = self.db.file.create(type=mime_type, name=name, - content=data, **file_props) - except exceptions.Reject: - pass - else: - files.append(fileid) - # attach the files to the issue - if not self.db.security.hasPermission('Edit', author, - classname, 'files'): - raise Unauthorized, _( - 'You are not permitted to add files to %(classname)s.' - ) % locals() + # Handle PGP signed or encrypted messages + parsed_message.get_pgp_message() - if nodeid: - # extend the existing files list - fileprop = cl.get(nodeid, 'files') - fileprop.extend(files) - props['files'] = fileprop - else: - # pre-load the files list - props['files'] = files + # extract content and attachments from message body + parsed_message.get_content_and_attachments() - # + # put attachments into files linked to the issue + parsed_message.create_files() + # create the message if there's a message body (content) - # - if (content and properties.has_key('messages')): - if not self.db.security.hasPermission('Create', author, 'msg'): - raise Unauthorized, _( - 'You are not permitted to create messages.') + parsed_message.create_msg() + + # perform the node change / create + nodeid = parsed_message.create_node() - try: - message_id = self.db.msg.create(author=author, - recipients=recipients, date=date.Date('.'), - summary=summary, content=content, files=files, - messageid=messageid, inreplyto=inreplyto, **msg_props) - except 
exceptions.Reject, error: - raise MailUsageError, _(""" -Mail message was rejected by a detector. -%(error)s -""") % locals() - # attach the message to the node - if not self.db.security.hasPermission('Edit', author, - classname, 'messages'): - raise Unauthorized, _( - 'You are not permitted to add messages to %(classname)s.' - ) % locals() + # commit the changes to the DB + self.db.commit() - if nodeid: - # add the message to the node's list - messages = cl.get(nodeid, 'messages') - messages.append(message_id) - props['messages'] = messages - else: - # pre-load the messages list - props['messages'] = [message_id] + return nodeid - # - # perform the node change / create - # - try: - # merge the command line props defined in issue_props into - # the props dictionary because function(**props, **issue_props) - # is a syntax error. - for prop in issue_props.keys() : - if not props.has_key(prop) : - props[prop] = issue_props[prop] - - # Check permissions for each property - for prop in props.keys(): - if not self.db.security.hasPermission('Edit', author, - classname, prop): - raise Unauthorized, _('You are not permitted to edit ' - 'property %(prop)s of class %(classname)s.') % locals() + def get_class_arguments(self, class_type, classname=None): + ''' class_type - a valid node class type: + - 'user' refers to the author of a message + - 'issue' refers to an issue-type class (to which the + message is appended) specified in parameter classname + Note that this need not be the real classname, we get + the real classname used as a parameter (from previous + message-parsing steps) + - 'file' specifies a file-type class + - 'msg' is the message-class + classname - the name of the current issue-type class + + Parse the commandline arguments and retrieve the properties that + are relevant to the class_type. We now allow multiple -S options + per class_type (-C option). 
+ ''' + allprops = {} - if nodeid: - cl.set(nodeid, **props) - else: - nodeid = cl.create(**props) - except (TypeError, IndexError, ValueError, exceptions.Reject), message: + classname = classname or class_type + cls_lookup = { 'issue' : classname } + + # Allow other issue-type classes -- take the real classname from + # previous parsing-steps of the message: + clsname = cls_lookup.get (class_type, class_type) + + # check if the clsname is valid + try: + self.db.getclass(clsname) + except KeyError: + mailadmin = self.instance.config['ADMIN_EMAIL'] raise MailUsageError, _(""" -There was a problem with the message you sent: - %(message)s +The mail gateway is not properly set up. Please contact +%(mailadmin)s and have them fix the incorrect class specified as: + %(clsname)s """) % locals() + + if self.arguments: + # The default type on the commandline is msg + if class_type == 'msg': + current_type = class_type + else: + current_type = None + + # Handle the arguments specified by the email gateway command line. + # We do this by looping over the list of self.arguments looking for + # a -C to match the class we want, then use the -S setting string. + for option, propstring in self.arguments: + if option in ( '-C', '--class'): + current_type = propstring.strip() + + if current_type != class_type: + current_type = None + + elif current_type and option in ('-S', '--set'): + cls = cls_lookup.get (current_type, current_type) + temp_cl = self.db.getclass(cls) + errors, props = setPropArrayFromString(self, + temp_cl, propstring.strip()) - # commit the changes to the DB - self.db.commit() + if errors: + mailadmin = self.instance.config['ADMIN_EMAIL'] + raise MailUsageError, _(""" +The mail gateway is not properly set up. 
Please contact +%(mailadmin)s and have them fix the incorrect properties: + %(errors)s +""") % locals() + allprops.update(props) - return nodeid + return allprops def setPropArrayFromString(self, cl, propString, nodeid=None): @@ -1484,7 +1696,7 @@ try: return db.user.create(username=trying, address=address, realname=realname, roles=db.config.NEW_EMAIL_USER_ROLES, - password=password.Password(password.generatePassword()), + password=password.Password(password.generatePassword(), config=db.config), **user_props) except exceptions.Reject: return 0 Modified: tracker/roundup-src/roundup/password.py ============================================================================== --- tracker/roundup-src/roundup/password.py (original) +++ tracker/roundup-src/roundup/password.py Thu Aug 4 15:46:52 2011 @@ -22,22 +22,135 @@ __docformat__ = 'restructuredtext' import re, string, random -from roundup.anypy.hashlib_ import md5, sha1 +from base64 import b64encode, b64decode +from roundup.anypy.hashlib_ import md5, sha1, shamodule try: import crypt except ImportError: crypt = None +_bempty = "" +_bjoin = _bempty.join + +def getrandbytes(count): + return _bjoin(chr(random.randint(0,255)) for i in xrange(count)) + +#NOTE: PBKDF2 hash is using this variant of base64 to minimize encoding size, +# and have charset that's compatible w/ unix crypt variants +def h64encode(data): + """encode using variant of base64""" + return b64encode(data, "./").strip("=\n") + +def h64decode(data): + """decode using variant of base64""" + off = len(data) % 4 + if off == 0: + return b64decode(data, "./") + elif off == 1: + raise ValueError("invalid bas64 input") + elif off == 2: + return b64decode(data + "==", "./") + else: + return b64decode(data + "=", "./") + +try: + from M2Crypto.EVP import pbkdf2 as _pbkdf2 +except ImportError: + #no m2crypto - make our own pbkdf2 function + from struct import pack + from hmac import HMAC + + def xor_bytes(left, right): + "perform bitwise-xor of two byte-strings" + 
return _bjoin(chr(ord(l) ^ ord(r)) for l, r in zip(left, right)) + + def _pbkdf2(password, salt, rounds, keylen): + digest_size = 20 # sha1 generates 20-byte blocks + total_blocks = int((keylen+digest_size-1)/digest_size) + hmac_template = HMAC(password, None, shamodule) + out = _bempty + for i in xrange(1, total_blocks+1): + hmac = hmac_template.copy() + hmac.update(salt + pack(">L",i)) + block = tmp = hmac.digest() + for j in xrange(rounds-1): + hmac = hmac_template.copy() + hmac.update(tmp) + tmp = hmac.digest() + #TODO: need to speed up this call + block = xor_bytes(block, tmp) + out += block + return out[:keylen] + +def pbkdf2(password, salt, rounds, keylen): + """pkcs#5 password-based key derivation v2.0 + + :arg password: passphrase to use to generate key (if unicode, converted to utf-8) + :arg salt: salt string to use when generating key (if unicode, converted to utf-8) + :param rounds: number of rounds to use to generate key + :arg keylen: number of bytes to generate + + If M2Crypto is present, uses it's implementation as backend. + + :returns: + raw bytes of generated key + """ + if isinstance(password, unicode): + password = password.encode("utf-8") + if isinstance(salt, unicode): + salt = salt.encode("utf-8") + if keylen > 40: + #NOTE: pbkdf2 allows up to (2**31-1)*20 bytes, + # but m2crypto has issues on some platforms above 40, + # and such sizes aren't needed for a password hash anyways... 
+ raise ValueError, "key length too large" + if rounds < 1: + raise ValueError, "rounds must be positive number" + return _pbkdf2(password, salt, rounds, keylen) + class PasswordValueError(ValueError): """ The password value is not valid """ pass -def encodePassword(plaintext, scheme, other=None): +def pbkdf2_unpack(pbkdf2): + """ unpack pbkdf2 encrypted password into parts, + assume it has format "{rounds}${salt}${digest} + """ + if isinstance(pbkdf2, unicode): + pbkdf2 = pbkdf2.encode("ascii") + try: + rounds, salt, digest = pbkdf2.split("$") + except ValueError: + raise PasswordValueError, "invalid PBKDF2 hash (wrong number of separators)" + if rounds.startswith("0"): + raise PasswordValueError, "invalid PBKDF2 hash (zero-padded rounds)" + try: + rounds = int(rounds) + except ValueError: + raise PasswordValueError, "invalid PBKDF2 hash (invalid rounds)" + raw_salt = h64decode(salt) + return rounds, salt, raw_salt, digest + +def encodePassword(plaintext, scheme, other=None, config=None): """Encrypt the plaintext password. """ if plaintext is None: plaintext = "" - if scheme == 'SHA': + if scheme == "PBKDF2": + if other: + rounds, salt, raw_salt, digest = pbkdf2_unpack(other) + else: + raw_salt = getrandbytes(20) + salt = h64encode(raw_salt) + if config: + rounds = config.PASSWORD_PBKDF2_DEFAULT_ROUNDS + else: + rounds = 10000 + if rounds < 1000: + raise PasswordValueError, "invalid PBKDF2 hash (rounds too low)" + raw_digest = pbkdf2(plaintext, raw_salt, rounds, 20) + return "%d$%s$%s" % (rounds, salt, h64encode(raw_digest)) + elif scheme == 'SHA': s = sha1(plaintext).hexdigest() elif scheme == 'MD5': s = md5(plaintext).hexdigest() @@ -58,7 +171,49 @@ chars = string.letters+string.digits return ''.join([random.choice(chars) for x in range(length)]) -class Password: +class JournalPassword: + """ Password dummy instance intended for journal operation. + We do not store passwords in the journal any longer. 
The dummy + version only reads the encryption scheme from the given + encrypted password. + """ + default_scheme = 'PBKDF2' # new encryptions use this scheme + pwre = re.compile(r'{(\w+)}(.+)') + + def __init__ (self, encrypted=''): + if isinstance(encrypted, self.__class__): + self.scheme = encrypted.scheme or self.default_scheme + else: + m = self.pwre.match(encrypted) + if m: + self.scheme = m.group(1) + else: + self.scheme = self.default_scheme + self.password = '' + + def dummystr(self): + """ return dummy string to store in journal + - reports scheme, but nothing else + """ + return "{%s}*encrypted*" % (self.scheme,) + + __str__ = dummystr + + def __cmp__(self, other): + """Compare this password against another password.""" + # check to see if we're comparing instances + if isinstance(other, self.__class__): + if self.scheme != other.scheme: + return cmp(self.scheme, other.scheme) + return cmp(self.password, other.password) + + # assume password is plaintext + if self.password is None: + raise ValueError, 'Password not set' + return cmp(self.password, encodePassword(other, self.scheme, + self.password or None)) + +class Password(JournalPassword): """The class encapsulates a Password property type value in the database. The encoding of the password is one if None, 'SHA', 'MD5' or 'plaintext'. @@ -80,24 +235,36 @@ >>> 'not sekrit' != p 1 """ + #TODO: code to migrate from old password schemes. 
- default_scheme = 'SHA' # new encryptions use this scheme - pwre = re.compile(r'{(\w+)}(.+)') + deprecated_schemes = ["SHA", "MD5", "crypt", "plaintext"] + known_schemes = ["PBKDF2"] + deprecated_schemes - def __init__(self, plaintext=None, scheme=None, encrypted=None): + def __init__(self, plaintext=None, scheme=None, encrypted=None, strict=False, config=None): """Call setPassword if plaintext is not None.""" if scheme is None: scheme = self.default_scheme if plaintext is not None: - self.setPassword (plaintext, scheme) + self.setPassword (plaintext, scheme, config=config) elif encrypted is not None: - self.unpack(encrypted, scheme) + self.unpack(encrypted, scheme, strict=strict, config=config) else: self.scheme = self.default_scheme self.password = None self.plaintext = None - def unpack(self, encrypted, scheme=None): + def needs_migration(self): + """ Password has insecure scheme or other insecure parameters + and needs migration to new password scheme + """ + if self.scheme in self.deprecated_schemes: + return True + rounds, salt, raw_salt, digest = pbkdf2_unpack(self.password) + if rounds < 1000: + return True + return False + + def unpack(self, encrypted, scheme=None, strict=False, config=None): """Set the password info from the scheme:<encryted info> string (the inverse of __str__) """ @@ -108,30 +275,18 @@ self.plaintext = None else: # currently plaintext - encrypt - self.setPassword(encrypted, scheme) + self.setPassword(encrypted, scheme, config=config) + if strict and self.scheme not in self.known_schemes: + raise PasswordValueError, "unknown encryption scheme: %r" % (self.scheme,) - def setPassword(self, plaintext, scheme=None): + def setPassword(self, plaintext, scheme=None, config=None): """Sets encrypts plaintext.""" if scheme is None: scheme = self.default_scheme self.scheme = scheme - self.password = encodePassword(plaintext, scheme) + self.password = encodePassword(plaintext, scheme, config=config) self.plaintext = plaintext - def __cmp__(self, 
other): - """Compare this password against another password.""" - # check to see if we're comparing instances - if isinstance(other, Password): - if self.scheme != other.scheme: - return cmp(self.scheme, other.scheme) - return cmp(self.password, other.password) - - # assume password is plaintext - if self.password is None: - raise ValueError, 'Password not set' - return cmp(self.password, encodePassword(other, self.scheme, - self.password)) - def __str__(self): """Stringify the encrypted password for database storage.""" if self.password is None: @@ -158,6 +313,22 @@ assert p == 'sekrit' assert p != 'not sekrit' assert 'sekrit' == p + assert 'not sekrit' != p + + # PBKDF2 - low level function + from binascii import unhexlify + k = pbkdf2("password", "ATHENA.MIT.EDUraeburn", 1200, 32) + assert k == unhexlify("5c08eb61fdf71e4e4ec3cf6ba1f5512ba7e52ddbc5e5142f708a31e2e62b1e13") + + # PBKDF2 - hash function + h = "5000$7BvbBq.EZzz/O0HuwX3iP.nAG3s$g3oPnFFaga2BJaX5PoPRljl4XIE" + assert encodePassword("sekrit", "PBKDF2", h) == h + + # PBKDF2 - high level integration + p = Password('sekrit', 'PBKDF2') + assert p == 'sekrit' + assert p != 'not sekrit' + assert 'sekrit' == p assert 'not sekrit' != p if __name__ == '__main__': Modified: tracker/roundup-src/roundup/roundupdb.py ============================================================================== --- tracker/roundup-src/roundup/roundupdb.py (original) +++ tracker/roundup-src/roundup/roundupdb.py Thu Aug 4 15:46:52 2011 @@ -31,11 +31,14 @@ from email.MIMEText import MIMEText from email.MIMEBase import MIMEBase +from anypy.email_ import FeedParser + from roundup import password, date, hyperdb from roundup.i18n import _ # MessageSendError is imported for backwards compatibility -from roundup.mailer import Mailer, MessageSendError, encode_quopri +from roundup.mailer import Mailer, MessageSendError, encode_quopri, \ + nice_sender_header class Database: @@ -100,8 +103,7 @@ elif isinstance(proptype, hyperdb.Interval): 
props[propname] = date.Interval(value) elif isinstance(proptype, hyperdb.Password): - props[propname] = password.Password() - props[propname].unpack(value) + props[propname] = password.Password(encrypted=value) # tag new user creation with 'admin' self.journaltag = 'admin' @@ -136,7 +138,7 @@ # Because getting a logger requires acquiring a lock, we want # to do it only once. if not hasattr(self, '__logger'): - self.__logger = logging.getLogger('hyperdb') + self.__logger = logging.getLogger('roundup.hyperdb') return self.__logger @@ -178,7 +180,7 @@ ) # New methods: - def addmessage(self, nodeid, summary, text): + def addmessage(self, issueid, summary, text): """Add a message to an issue's mail spool. A new "msg" node is constructed using the current date, the user that @@ -191,8 +193,8 @@ appended to the "messages" field of the specified issue. """ - def nosymessage(self, nodeid, msgid, oldvalues, whichnosy='nosy', - from_address=None, cc=[], bcc=[]): + def nosymessage(self, issueid, msgid, oldvalues, whichnosy='nosy', + from_address=None, cc=[], bcc=[], cc_emails = [], bcc_emails = []): """Send a message to the members of an issue's nosy list. The message is sent only to users on the nosy list who are not @@ -211,6 +213,12 @@ message to that may not be specified in the message's recipients list. These recipients will not be included in the To: or Cc: address lists. + + The cc_emails and bcc_emails arguments take a list of additional + recipient email addresses (just the mail address not roundup users) + this can be useful for sending to additional email addresses which are no + roundup users. These arguments are currently not used by roundups + nosyreaction but can be used by customized (nosy-)reactors. 
""" if msgid: authid = self.db.msg.get(msgid, 'author') @@ -227,18 +235,29 @@ seen_message[recipient] = 1 def add_recipient(userid, to): - # make sure they have an address + """ make sure they have an address """ address = self.db.user.get(userid, 'address') if address: to.append(address) recipients.append(userid) def good_recipient(userid): - # Make sure we don't send mail to either the anonymous - # user or a user who has already seen the message. + """ Make sure we don't send mail to either the anonymous + user or a user who has already seen the message. + Also check permissions on the message if not a system + message: A user must have view permission on content and + files to be on the receiver list. We do *not* check the + author etc. for now. + """ + allowed = True + if msgid: + for prop in 'content', 'files': + if prop in self.db.msg.properties: + allowed = allowed and self.db.security.hasPermission( + 'View', userid, 'msg', prop, msgid) return (userid and (self.db.user.get(userid, 'username') != 'anonymous') and - not seen_message.has_key(userid)) + allowed and not seen_message.has_key(userid)) # possibly send the message to the author, as long as they aren't # anonymous @@ -251,34 +270,36 @@ seen_message[authid] = 1 # now deal with the nosy and cc people who weren't recipients. - for userid in cc + self.get(nodeid, whichnosy): + for userid in cc + self.get(issueid, whichnosy): if good_recipient(userid): add_recipient(userid, sendto) + sendto.extend (cc_emails) # now deal with bcc people. for userid in bcc: if good_recipient(userid): add_recipient(userid, bcc_sendto) + bcc_sendto.extend (bcc_emails) if oldvalues: - note = self.generateChangeNote(nodeid, oldvalues) + note = self.generateChangeNote(issueid, oldvalues) else: - note = self.generateCreateNote(nodeid) + note = self.generateCreateNote(issueid) # If we have new recipients, update the message's recipients # and send the mail. 
if sendto or bcc_sendto: if msgid is not None: self.db.msg.set(msgid, recipients=recipients) - self.send_message(nodeid, msgid, note, sendto, from_address, + self.send_message(issueid, msgid, note, sendto, from_address, bcc_sendto) # backwards compatibility - don't remove sendmessage = nosymessage - def send_message(self, nodeid, msgid, note, sendto, from_address=None, + def send_message(self, issueid, msgid, note, sendto, from_address=None, bcc_sendto=[], authid=None): - '''Actually send the nominated message from this node to the sendto + '''Actually send the nominated message from this issue to the sendto recipients, with the note appended. ''' users = self.db.user @@ -297,14 +318,14 @@ # this is an old message that didn't get a messageid, so # create one messageid = "<%s.%s.%s%s@%s>"%(time.time(), random.random(), - self.classname, nodeid, + self.classname, issueid, self.db.config.MAIL_DOMAIN) if msgid is not None: messages.set(msgid, messageid=messageid) # compose title cn = self.classname - title = self.get(nodeid, 'title') or '%s message copy'%cn + title = self.get(issueid, 'title') or '%s message copy'%cn # figure author information if authid: @@ -331,11 +352,11 @@ # put in roundup's signature if self.db.config.EMAIL_SIGNATURE_POSITION == 'top': - m.append(self.email_signature(nodeid, msgid)) + m.append(self.email_signature(issueid, msgid)) # add author information if authid and self.db.config.MAIL_ADD_AUTHORINFO: - if msgid and len(self.get(nodeid, 'messages')) == 1: + if msgid and len(self.get(issueid, 'messages')) == 1: m.append(_("New submission from %(authname)s%(authaddr)s:") % locals()) elif msgid: @@ -355,8 +376,7 @@ if msgid : for fileid in messages.get(msgid, 'files') : # check the attachment size - filename = self.db.filename('file', fileid, None) - filesize = os.path.getsize(filename) + filesize = self.db.filesize('file', fileid, None) if filesize <= self.db.config.NOSY_MAX_ATTACHMENT_SIZE: message_files.append(fileid) else: @@ -372,7 +392,7 @@ 
# put in roundup's signature if self.db.config.EMAIL_SIGNATURE_POSITION == 'bottom': - m.append(self.email_signature(nodeid, msgid)) + m.append(self.email_signature(issueid, msgid)) # figure the encoding charset = getattr(self.db.config, 'EMAIL_CHARSET', 'utf-8') @@ -392,7 +412,7 @@ if from_tag: from_tag = ' ' + from_tag - subject = '[%s%s] %s'%(cn, nodeid, title) + subject = '[%s%s] %s'%(cn, issueid, title) author = (authname + from_tag, from_address) # send an individual message per recipient? @@ -401,9 +421,10 @@ else: sendto = [sendto] + # tracker sender info tracker_name = unicode(self.db.config.TRACKER_NAME, 'utf-8') - tracker_name = formataddr((tracker_name, from_address)) - tracker_name = Header(tracker_name, charset) + tracker_name = nice_sender_header(tracker_name, from_address, + charset) # now send one or more messages # TODO: I believe we have to create a new message each time as we @@ -435,12 +456,12 @@ if not 'name' in cl.getprops(): continue if isinstance(prop, hyperdb.Link): - value = self.get(nodeid, propname) + value = self.get(issueid, propname) if value is None: continue values = [value] else: - values = self.get(nodeid, propname) + values = self.get(issueid, propname) if not values: continue values = [cl.get(v, 'name') for v in values] @@ -453,11 +474,11 @@ if not inreplyto: # Default the reply to the first message - msgs = self.get(nodeid, 'messages') + msgs = self.get(issueid, 'messages') # Assume messages are sorted by increasing message number here # If the issue is just being created, and the submitter didn't # provide a message, then msgs will be empty. 
- if msgs and msgs[0] != nodeid: + if msgs and msgs[0] != msgid: inreplyto = messages.get(msgs[0], 'messageid') if inreplyto: message['In-Reply-To'] = inreplyto @@ -466,6 +487,7 @@ if message_files: # first up the text as a part part = MIMEText(body) + part.set_charset(charset) encode_quopri(part) message.attach(part) @@ -485,6 +507,12 @@ else: part = MIMEText(content) part['Content-Transfer-Encoding'] = '7bit' + elif mime_type == 'message/rfc822': + main, sub = mime_type.split('/') + p = FeedParser() + p.feed(content) + part = MIMEBase(main, sub) + part.set_payload([p.close()]) else: # some other type, so encode it if not mime_type: @@ -496,7 +524,8 @@ part = MIMEBase(main, sub) part.set_payload(content) Encoders.encode_base64(part) - part['Content-Disposition'] = 'attachment;\n filename="%s"'%name + cd = 'Content-Disposition' + part[cd] = 'attachment;\n filename="%s"'%name message.attach(part) else: @@ -509,7 +538,7 @@ mailer.smtp_send(sendto, message.as_string()) first = False - def email_signature(self, nodeid, msgid): + def email_signature(self, issueid, msgid): ''' Add a signature to the e-mail with some useful information ''' # simplistic check to see if the url is valid, @@ -522,7 +551,7 @@ else: if not base.endswith('/'): base = base + '/' - web = base + self.classname + nodeid + web = base + self.classname + issueid # ensure the email address is properly quoted email = formataddr((self.db.config.TRACKER_NAME, @@ -532,7 +561,7 @@ return '\n%s\n%s\n<%s>\n%s'%(line, email, web, line) - def generateCreateNote(self, nodeid): + def generateCreateNote(self, issueid): """Generate a create note that lists initial property values """ cn = self.classname @@ -544,7 +573,7 @@ prop_items = props.items() prop_items.sort() for propname, prop in prop_items: - value = cl.get(nodeid, propname, None) + value = cl.get(issueid, propname, None) # skip boring entries if not value: continue @@ -574,7 +603,7 @@ m.insert(0, '') return '\n'.join(m) - def generateChangeNote(self, 
nodeid, oldvalues): + def generateChangeNote(self, issueid, oldvalues): """Generate a change note that lists property changes """ if not isinstance(oldvalues, type({})): @@ -595,7 +624,7 @@ # not all keys from oldvalues might be available in database # this happens when property was deleted try: - new_value = cl.get(nodeid, key) + new_value = cl.get(issueid, key) except KeyError: continue # the old value might be non existent @@ -616,7 +645,7 @@ changed_items.sort() for propname, oldvalue in changed_items: prop = props[propname] - value = cl.get(nodeid, propname, None) + value = cl.get(issueid, propname, None) if isinstance(prop, hyperdb.Link): link = self.db.classes[prop.classname] key = link.labelprop(default_to_id=1) Modified: tracker/roundup-src/roundup/scripts/roundup_mailgw.py ============================================================================== --- tracker/roundup-src/roundup/scripts/roundup_mailgw.py (original) +++ tracker/roundup-src/roundup/scripts/roundup_mailgw.py Thu Aug 4 15:46:52 2011 @@ -105,6 +105,11 @@ This supports the same notation as IMAP. imaps username:password at server [mailbox] +IMAPS_CRAM: + Connect to an IMAP server over ssl using CRAM-MD5 authentication. + This supports the same notation as IMAP. 
+ imaps_cram username:password at server [mailbox] + """)%{'program': args[0]} return 1 @@ -153,7 +158,7 @@ source, specification = args[1:3] # time out net connections after a minute if we can - if source not in ('mailbox', 'imaps'): + if source not in ('mailbox', 'imaps', 'imaps_cram'): if hasattr(socket, 'setdefaulttimeout'): socket.setdefaulttimeout(60) @@ -189,14 +194,19 @@ elif source == 'apop': return handler.do_apop(server, username, password) elif source.startswith('imap'): - ssl = source.endswith('s') + ssl = cram = 0 + if source.endswith('s'): + ssl = 1 + elif source.endswith('s_cram'): + ssl = cram = 1 mailbox = '' if len(args) > 3: mailbox = args[3] - return handler.do_imap(server, username, password, mailbox, ssl) + return handler.do_imap(server, username, password, mailbox, ssl, + cram) return usage(argv, _('Error: The source must be either "mailbox",' - ' "pop", "pops", "apop", "imap" or "imaps"')) + ' "pop", "pops", "apop", "imap", "imaps" or "imaps_cram')) def run(): sys.exit(main(sys.argv)) Modified: tracker/roundup-src/roundup/scripts/roundup_server.py ============================================================================== --- tracker/roundup-src/roundup/scripts/roundup_server.py (original) +++ tracker/roundup-src/roundup/scripts/roundup_server.py Thu Aug 4 15:46:52 2011 @@ -29,8 +29,6 @@ except ImportError: SSL = None -from time import sleep - # python version check from roundup import configuration, version_check from roundup import __version__ as roundup_version @@ -76,7 +74,7 @@ def auto_ssl(): print _('WARNING: generating temporary SSL certificate') - import OpenSSL, time, random, sys + import OpenSSL, random pkey = OpenSSL.crypto.PKey() pkey.generate_key(OpenSSL.crypto.TYPE_RSA, 768) cert = OpenSSL.crypto.X509() @@ -124,14 +122,11 @@ def readline(self, *args): """ SSL.Connection can return WantRead """ - line = None - while not line: + while True: try: - line = self.__fileobj.readline(*args) + return self.__fileobj.readline(*args) 
except SSL.WantReadError: - sleep (.1) - line = None - return line + time.sleep(.1) def read(self, *args): """ SSL.Connection can return WantRead """ @@ -139,7 +134,7 @@ try: return self.__fileobj.read(*args) except SSL.WantReadError: - sleep (.1) + time.sleep(.1) def __getattr__(self, attrib): return getattr(self.__fileobj, attrib) @@ -193,8 +188,6 @@ """ Execute the CGI command. Wrap an innner call in an error handler so all errors can be caught. """ - save_stdin = sys.stdin - sys.stdin = self.rfile try: self.inner_run_cgi() except client.NotFound: @@ -231,7 +224,6 @@ # out to the logfile print 'EXCEPTION AT', ts traceback.print_exc() - sys.stdin = save_stdin def run_cgi_outer(self): "Log requests that are in progress" @@ -377,10 +369,16 @@ env['SCRIPT_NAME'] = '' env['SERVER_NAME'] = self.server.server_name env['SERVER_PORT'] = str(self.server.server_port) - env['HTTP_HOST'] = self.headers['host'] + try: + env['HTTP_HOST'] = self.headers ['host'] + except KeyError: + env['HTTP_HOST'] = '' if os.environ.has_key('CGI_SHOW_TIMING'): env['CGI_SHOW_TIMING'] = os.environ['CGI_SHOW_TIMING'] env['HTTP_ACCEPT_LANGUAGE'] = self.headers.get('accept-language') + range = self.headers.getheader('range') + if range: + env['HTTP_RANGE'] = range # do the roundup thing tracker = self.get_tracker(tracker_name) @@ -481,9 +479,16 @@ SETTINGS = ( ("main", ( - (configuration.Option, "host", "", + (configuration.Option, "host", "localhost", "Host name of the Roundup web server instance.\n" - "If empty, listen on all network interfaces."), + "If left unconfigured (no 'host' setting) the default\n" + "will be used.\n" + "If empty, listen on all network interfaces.\n" + "If you want to explicitly listen on all\n" + "network interfaces, the address 0.0.0.0 is a more\n" + "explicit way to achieve this, the use of an empty\n" + "string for this purpose is deprecated and will go away\n" + "in a future release."), (configuration.IntegerNumberOption, "port", DEFAULT_PORT, "Port to listen on."), 
(configuration.NullableFilePathOption, "favicon", "favicon.ico", @@ -607,9 +612,28 @@ DEBUG_MODE = self["MULTIPROCESS"] == "debug" CONFIG = self + def setup(self): + if self.CONFIG["SSL"]: + # perform initial ssl handshake. This will set + # internal state correctly so that later closing SSL + # socket works (with SSL end-handshake started) + self.request.do_handshake() + RoundupRequestHandler.setup(self) + + def finish(self): + RoundupRequestHandler.finish(self) + if self.CONFIG["SSL"]: + self.request.shutdown() + self.request.close() + if self["SSL"]: base_server = SecureHTTPServer else: + # time out after a minute if we can + # This sets the socket to non-blocking. SSL needs a blocking + # socket, so we do this only for non-SSL connections. + if hasattr(socket, 'setdefaulttimeout'): + socket.setdefaulttimeout(60) base_server = BaseHTTPServer.HTTPServer # obtain request server class @@ -731,7 +755,10 @@ -h print this text and exit -S create or update configuration file and exit -C <fname> use configuration file <fname> - -n <name> set the host name of the Roundup web server instance + -n <name> set the host name of the Roundup web server instance, + specifies on which network interfaces to listen for + connections, defaults to localhost, use 0.0.0.0 to bind + to all network interfaces -p <port> set the port to listen on (default: %(port)s) -l <fname> log to the file indicated by fname instead of stderr/stdout -N log client machine names instead of IP addresses (much slower) @@ -831,10 +858,6 @@ def run(port=undefined, success_message=None): ''' Script entry point - handle args and figure out what to to. 
''' - # time out after a minute if we can - if hasattr(socket, 'setdefaulttimeout'): - socket.setdefaulttimeout(60) - config = ServerConfig() # additional options short_options = "hvS" Modified: tracker/roundup-src/roundup/security.py ============================================================================== --- tracker/roundup-src/roundup/security.py (original) +++ tracker/roundup-src/roundup/security.py Thu Aug 4 15:46:52 2011 @@ -54,6 +54,28 @@ # we have a winner return 1 + def searchable(self, classname, property): + """ A Permission is searchable for the given permission if it + doesn't include a check method and otherwise matches the + given parameters. + """ + if self.name not in ('View', 'Search'): + return 0 + + # are we checking the correct class + if self.klass is not None and self.klass != classname: + return 0 + + # what about property? + if not self._properties_dict[property]: + return 0 + + if self.check: + return 0 + + return 1 + + def __repr__(self): return '<Permission 0x%x %r,%r,%r,%r>'%(id(self), self.name, self.klass, self.properties, self.check) @@ -162,12 +184,9 @@ Note that this functionality is actually implemented by the Permission.test() method. ''' - roles = self.db.user.get(userid, 'roles') - if roles is None: - return 0 if itemid and classname is None: raise ValueError, 'classname must accompany itemid' - for rolename in [x.lower().strip() for x in roles.split(',')]: + for rolename in self.db.user.get_roles(userid): if not rolename or not self.role.has_key(rolename): continue # for each of the user's Roles, check the permissions @@ -178,6 +197,81 @@ return 1 return 0 + def roleHasSearchPermission(self, classname, property, *rolenames): + """ For each of the given roles, check the permissions. + Property can be a transitive property. 
+ """ + perms = [] + # pre-compute permissions + for rn in rolenames : + for perm in self.role[rn].permissions: + perms.append(perm) + # Note: break from inner loop means "found" + # break from outer loop means "not found" + cn = classname + prev = None + prop = None + Link = hyperdb.Link + Multilink = hyperdb.Multilink + for propname in property.split('.'): + if prev: + try: + cn = prop.classname + except AttributeError: + break + prev = propname + try: + cls = self.db.getclass(cn) + prop = cls.getprops()[propname] + except KeyError: + break + for perm in perms: + if perm.searchable(cn, propname): + break + else: + break + else: + # for Link and Multilink require search permission on label- + # and order-properties and on ID + if isinstance(prop, Multilink) or isinstance(prop, Link): + try: + cls = self.db.getclass(prop.classname) + except KeyError: + return 0 + props = dict.fromkeys(('id', cls.labelprop(), cls.orderprop())) + for p in props.iterkeys(): + for perm in perms: + if perm.searchable(prop.classname, p): + break + else: + return 0 + return 1 + return 0 + + def hasSearchPermission(self, userid, classname, property): + '''Look through all the Roles, and hence Permissions, and + see if "permission" exists given the constraints of + classname and property. + + A search permission is granted if we find a 'View' or + 'Search' permission for the user which does *not* include + a check function. If such a permission is found, the user may + search for the given property in the given class. + + Note that classname *and* property are mandatory arguments. + + Contrary to hasPermission, the search will *not* match if + there are additional constraints (namely a search function) + on a Permission found. + + Concerning property, the Permission matched must have + either no properties listed or the property must appear in + the list. 
+ ''' + roles = [r for r in self.db.user.get_roles(userid) + if r and self.role.has_key(r)] + return self.roleHasSearchPermission (classname, property, *roles) + def addPermission(self, **propspec): ''' Create a new Permission with the properties defined in 'propspec'. See the Permission class for the possible @@ -211,4 +305,22 @@ role = self.role[rolename.lower()] role.permissions.append(permission) + # Convenience methods for removing non-allowed properties from a + # filterspec or sort/group list + + def filterFilterspec(self, userid, classname, filterspec): + """ Return a filterspec that has all non-allowed properties removed. + """ + return dict ([(k, v) for k, v in filterspec.iteritems() + if self.hasSearchPermission(userid,classname,k)]) + + def filterSortspec(self, userid, classname, sort): + """ Return a sort- or group-list that has all non-allowed properties + removed. + """ + if isinstance(sort, tuple) and sort[0] in '+-': + sort = [sort] + return [(d, p) for d, p in sort + if self.hasSearchPermission(userid,classname,p)] + # vim: set filetype=python sts=4 sw=4 et si : Modified: tracker/roundup-src/roundup/xmlrpc.py ============================================================================== --- tracker/roundup-src/roundup/xmlrpc.py (original) +++ tracker/roundup-src/roundup/xmlrpc.py Thu Aug 4 15:46:52 2011 @@ -10,6 +10,7 @@ from roundup.date import Date, Range, Interval from roundup import actions from SimpleXMLRPCServer import * +from xmlrpclib import Binary def translate(value): """Translate value to becomes valid for XMLRPC transmission.""" @@ -32,12 +33,19 @@ props = {} for arg in args: - if arg.find('=') == -1: + if isinstance(arg, Binary): + arg = arg.data + try : + key, value = arg.split('=', 1) + except ValueError : raise UsageError, 'argument "%s" not propname=value'%arg - l = arg.split('=') - if len(l) < 2: - raise UsageError, 'argument "%s" not propname=value'%arg - key, value = l[0], '='.join(l[1:]) + if isinstance(key, unicode): + try: + 
key = key.encode ('ascii') + except UnicodeEncodeError: + raise UsageError, 'argument %r is no valid ascii keyword'%key + if isinstance(value, unicode): + value = value.encode('utf-8') if value: try: props[key] = hyperdb.rawToHyperdb(db, cl, itemid, @@ -81,8 +89,24 @@ def filter(self, classname, search_matches, filterspec, sort=[], group=[]): cl = self.db.getclass(classname) + uid = self.db.getuid() + security = self.db.security + filterspec = security.filterFilterspec (uid, classname, filterspec) + sort = security.filterSortspec (uid, classname, sort) + group = security.filterSortspec (uid, classname, group) result = cl.filter(search_matches, filterspec, sort=sort, group=group) - return result + check = security.hasPermission + x = [id for id in result if check('View', uid, classname, itemid=id)] + return x + + def lookup(self, classname, key): + cl = self.db.getclass(classname) + uid = self.db.getuid() + prop = cl.getkey() + check = self.db.security.hasSearchPermission + if not check(uid, classname, 'id') or not check(uid, classname, prop): + raise Unauthorised('Permission to search %s denied'%classname) + return cl.lookup(key) def display(self, designator, *properties): classname, itemid = hyperdb.splitDesignator(designator) @@ -113,9 +137,9 @@ raise UsageError, 'you must provide the "%s" property.'%key for key in props: - if not self.db.security.hasPermission('Edit', self.db.getuid(), classname, - property=key): - raise Unauthorised('Permission to set %s.%s denied'%(classname, key)) + if not self.db.security.hasPermission('Create', self.db.getuid(), + classname, property=key): + raise Unauthorised('Permission to create %s.%s denied'%(classname, key)) # do the actual create try: Modified: tracker/roundup-src/scripts/imapServer.py ============================================================================== --- tracker/roundup-src/scripts/imapServer.py (original) +++ tracker/roundup-src/scripts/imapServer.py Thu Aug 4 15:46:52 2011 @@ -39,7 +39,7 @@ import time 
logging.basicConfig() -log = logging.getLogger('IMAPServer') +log = logging.getLogger('roundup.IMAPServer') version = '0.1.2' Modified: tracker/roundup-src/setup.py ============================================================================== --- tracker/roundup-src/setup.py (original) +++ tracker/roundup-src/setup.py Thu Aug 4 15:46:52 2011 @@ -23,6 +23,7 @@ from roundup.dist.command.build_py import build_py from roundup.dist.command.build import build, list_message_files from roundup.dist.command.bdist_rpm import bdist_rpm +from roundup.dist.command.install_lib import install_lib from distutils.core import setup import sys, os @@ -94,6 +95,20 @@ # perform the setup action from roundup import __version__ + # long_description may not contain non-ascii characters. Distutils + # will produce an non-installable installer on linux *and* we can't + # run the bdist_wininst on Linux if there are non-ascii characters + # because the distutils installer will try to use the mbcs codec + # which isn't available on non-windows platforms. See also + # http://bugs.python.org/issue10945 + long_description=open('doc/announcement.txt').read().decode('utf8') + try: + long_description = long_description.encode('ascii') + except UnicodeEncodeError, cause: + print >> sys.stderr, "doc/announcement.txt contains non-ascii: %s" \ + % cause + sys.exit(42) + setup(name='roundup', version=__version__, author="Richard Jones", @@ -101,69 +116,7 @@ description="A simple-to-use and -install issue-tracking system" " with command-line, web and e-mail interfaces. 
Highly" " customisable.", - long_description='''This version of Roundup fixes some bugs: - -- Minor update of doc/developers.txt to point to the new resources - on www.roundup-tracker.org (Bernhard Reiter) -- Small CSS improvements regaring the search box (thanks Thomas Arendsan Hein) - (issue 2550589) -- Indexers behaviour made more consistent regarding length of indexed words - and stopwords (thanks Thomas Arendsen Hein, Bernhard Reiter)(issue 2550584) -- fixed typos in the installation instructions (thanks Thomas Arendsen Hein) - (issue 2550573) -- New config option csv_field_size: Pythons csv module (which is used - for export/import) has a new field size limit starting with python2.5. - We now issue a warning during export if the limit is too small and use - the csv_field_size configuration during import to set the limit for - the csv module. -- Small fix for CGI-handling of XMLRPC requests for python2.4, this - worked only for 2.5 and beyond due to a change in the xmlrpc interface - in python -- Document filter method of xmlrpc interface -- Fix interaction of SSL and XMLRPC, now XMLRPC works with SSL - -If you're upgrading from an older version of Roundup you *must* follow -the "Software Upgrade" guidelines given in the maintenance documentation. - -Roundup requires python 2.3 or later (but not 3+) for correct operation. - -To give Roundup a try, just download (see below), unpack and run:: - - roundup-demo - -Documentation is available at the website: - http://roundup.sourceforge.net/ -Mailing lists - the place to ask questions: - http://sourceforge.net/mail/?group_id=31577 - -About Roundup -============= - -Roundup is a simple-to-use and -install issue-tracking system with -command-line, web and e-mail interfaces. It is based on the winning design -from Ka-Ping Yee in the Software Carpentry "Track" design competition. - -Note: Ping is not responsible for this project. The contact for this -project is richard at users.sourceforge.net. 
- -Roundup manages a number of issues (with flexible properties such as -"description", "priority", and so on) and provides the ability to: - -(a) submit new issues, -(b) find and edit existing issues, and -(c) discuss issues with other participants. - -The system will facilitate communication among the participants by managing -discussions and notifying interested parties when issues are edited. One of -the major design goals for Roundup that it be simple to get going. Roundup -is therefore usable "out of the box" with any python 2.3+ (but not 3+) -installation. It doesn't even need to be "installed" to be operational, -though an install script is provided. - -It comes with two issue tracker templates (a classic bug/feature tracker and -a minimal skeleton) and five database back-ends (anydbm, sqlite, metakit, -mysql and postgresql). -''', + long_description=long_description, url='http://www.roundup-tracker.org', download_url='http://pypi.python.org/pypi/roundup', classifiers=['Development Status :: 5 - Production/Stable', @@ -188,6 +141,7 @@ 'build_py': build_py, 'build': build, 'bdist_rpm': bdist_rpm, + 'install_lib': install_lib, }, packages=packages, py_modules=py_modules, Modified: tracker/roundup-src/share/roundup/templates/classic/html/_generic.index.html ============================================================================== --- tracker/roundup-src/share/roundup/templates/classic/html/_generic.index.html (original) +++ tracker/roundup-src/share/roundup/templates/classic/html/_generic.index.html Thu Aug 4 15:46:52 2011 @@ -39,7 +39,8 @@ <p class="form-help"> Remove entries by deleting their line. Add new entries by appending - them to the table - put an X in the id column. + them to the table - put an X in the id column. If you wish to restore a + removed item and you know its id then just put that id in the id column. 
</p> </tal:block> <form onSubmit="return submit_once()" method="POST" Added: tracker/roundup-src/share/roundup/templates/classic/html/_generic.keywords_expr.html ============================================================================== --- (empty file) +++ tracker/roundup-src/share/roundup/templates/classic/html/_generic.keywords_expr.html Thu Aug 4 15:46:52 2011 @@ -0,0 +1,11 @@ +<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> +<html> + <head> + <link rel="stylesheet" type="text/css" href="@@file/style.css" /> + <meta http-equiv="Content-Type" content="text/html; charset=utf-8;" /> + <title tal:content="string:Roundup Keywords Expression Editor"> + + + + Modified: tracker/roundup-src/share/roundup/templates/classic/html/issue.search.html ============================================================================== --- tracker/roundup-src/share/roundup/templates/classic/html/issue.search.html (original) +++ tracker/roundup-src/share/roundup/templates/classic/html/issue.search.html Thu Aug 4 15:46:52 2011 @@ -12,10 +12,10 @@ cols python:request.columns or 'id activity title status assignedto'.split(); sort_on python:request.sort and request.sort[0] or nothing; sort_desc python:sort_on and sort_on[0] == '-'; - sort_on python:(sort_on and sort_on[1]) or 'activity'; + sort_on python:(sort_on and sort_on[1]) or (not request.nodeid and 'activity') or ''; group_on python:request.group and request.group[0] or nothing; group_desc python:group_on and group_on[0] == '-'; - group_on python:(group_on and group_on[1]) or 'priority'; + group_on python:(group_on and group_on[1]) or (not request.nodeid and 'priority') or ''; search_input templates/page/macros/search_input; search_date templates/page/macros/search_date; @@ -23,6 +23,7 @@ sort_input templates/page/macros/sort_input; group_input templates/page/macros/group_input; search_select templates/page/macros/search_select; + search_select_keywords 
templates/page/macros/search_select_keywords; search_select_translated templates/page/macros/search_select_translated; search_multiselect templates/page/macros/search_multiselect;"> @@ -54,7 +55,7 @@ db_klass string:keyword; db_content string:name;"> Keyword: - + @@ -167,8 +168,8 @@ No Sort or group:     - - + + Modified: tracker/roundup-src/share/roundup/templates/classic/html/page.html ============================================================================== --- tracker/roundup-src/share/roundup/templates/classic/html/page.html (original) +++ tracker/roundup-src/share/roundup/templates/classic/html/page.html Thu Aug 4 15:46:52 2011 @@ -231,7 +231,7 @@ - @@ -247,6 +247,22 @@ + + + + Modified: tracker/roundup-src/share/roundup/templates/classic/html/style.css ============================================================================== --- tracker/roundup-src/share/roundup/templates/classic/html/style.css (original) +++ tracker/roundup-src/share/roundup/templates/classic/html/style.css Thu Aug 4 15:46:52 2011 @@ -413,6 +413,7 @@ font-weight: bold; text-align: left; } + input[type="text"]:focus, input[type="checkbox"]:focus, input[type="radio"]:focus, @@ -421,5 +422,17 @@ background-color: #ffffc0; } +.calendar_display { + text-align: center; +} + +.calendar_display td { + padding: 1px 4px 1px 4px; +} + +.calendar_display .today { + background-color: #afafaf; +} + /* vim: sts=2 sw=2 et */ Modified: tracker/roundup-src/share/roundup/templates/classic/schema.py ============================================================================== --- tracker/roundup-src/share/roundup/templates/classic/schema.py (original) +++ tracker/roundup-src/share/roundup/templates/classic/schema.py Thu Aug 4 15:46:52 2011 @@ -112,6 +112,8 @@ description="User is allowed to view their own user details") db.security.addPermissionToRole('User', p) p = db.security.addPermission(name='Edit', klass='user', check=own_record, + properties=('username', 'password', 'address', 'realname', 
'phone', + 'organisation', 'alternate_addresses', 'queries', 'timezone'), description="User is allowed to edit their own user details") db.security.addPermissionToRole('User', p) @@ -127,6 +129,8 @@ p = db.security.addPermission(name='View', klass='query', check=view_query, description="User is allowed to view their own and public queries") db.security.addPermissionToRole('User', p) +p = db.security.addPermission(name='Search', klass='query') +db.security.addPermissionToRole('User', p) p = db.security.addPermission(name='Edit', klass='query', check=edit_query, description="User is allowed to edit their queries") db.security.addPermissionToRole('User', p) Modified: tracker/roundup-src/share/roundup/templates/minimal/html/_generic.index.html ============================================================================== --- tracker/roundup-src/share/roundup/templates/minimal/html/_generic.index.html (original) +++ tracker/roundup-src/share/roundup/templates/minimal/html/_generic.index.html Thu Aug 4 15:46:52 2011 @@ -39,7 +39,8 @@

    Remove entries by deleting their line. Add new entries by appending - them to the table - put an X in the id column. + them to the table - put an X in the id column. If you wish to restore a + removed item and you know its id then just put that id in the id column.

    - Modified: tracker/roundup-src/share/roundup/templates/minimal/html/style.css ============================================================================== --- tracker/roundup-src/share/roundup/templates/minimal/html/style.css (original) +++ tracker/roundup-src/share/roundup/templates/minimal/html/style.css Thu Aug 4 15:46:52 2011 @@ -50,9 +50,6 @@ padding: 5px; border-bottom: 1px solid #444; } -#searchbox { - float: right; -} div#body-title { float: left; @@ -127,7 +124,7 @@ /* style for search forms */ ul.search-checkboxes { display: inline; - padding: none; + padding: 0; list-style: none; } ul.search-checkboxes > li { @@ -421,3 +418,26 @@ font-weight: bold; text-align: left; } + +input[type="text"]:focus, +input[type="checkbox"]:focus, +input[type="radio"]:focus, +input[type="password"]:focus, +textarea:focus, select:focus { + background-color: #ffffc0; +} + +.calendar_display { + text-align: center; +} + +.calendar_display td { + padding: 1px 4px 1px 4px; +} + +.calendar_display .today { + background-color: #afafaf; +} + +/* vim: sts=2 sw=2 et +*/ Modified: tracker/roundup-src/share/roundup/templates/minimal/schema.py ============================================================================== --- tracker/roundup-src/share/roundup/templates/minimal/schema.py (original) +++ tracker/roundup-src/share/roundup/templates/minimal/schema.py Thu Aug 4 15:46:52 2011 @@ -41,6 +41,7 @@ description="User is allowed to view their own user details") db.security.addPermissionToRole('User', p) p = db.security.addPermission(name='Edit', klass='user', check=own_record, + properties=('username', 'password', 'address', 'alternate_addresses'), description="User is allowed to edit their own user details") db.security.addPermissionToRole('User', p) Modified: tracker/roundup-src/test/db_test_base.py ============================================================================== --- tracker/roundup-src/test/db_test_base.py (original) +++ tracker/roundup-src/test/db_test_base.py 
Thu Aug 4 15:46:52 2011 @@ -24,7 +24,8 @@ from roundup.hyperdb import String, Password, Link, Multilink, Date, \ Interval, DatabaseError, Boolean, Number, Node from roundup.mailer import Mailer -from roundup import date, password, init, instance, configuration, support +from roundup import date, password, init, instance, configuration, \ + roundupdb, i18n from mocknull import MockNull @@ -34,6 +35,7 @@ config.RDBMS_HOST = "localhost" config.RDBMS_USER = "rounduptest" config.RDBMS_PASSWORD = "rounduptest" +config.RDBMS_TEMPLATE = "template0" #config.logging = MockNull() # these TRACKER_WEB and MAIL_DOMAIN values are used in mailgw tests config.MAIL_DOMAIN = "your.tracker.email.domain.example" @@ -113,6 +115,9 @@ priority.create(name="bug", order="1") db.commit() + # nosy tests require this + db.security.addPermissionToRole('User', 'View', 'msg') + class MyTestCase(unittest.TestCase): def tearDown(self): if hasattr(self, 'db'): @@ -120,21 +125,72 @@ if os.path.exists(config.DATABASE): shutil.rmtree(config.DATABASE) + def open_database(self): + self.db = self.module.Database(config, 'admin') + + if os.environ.has_key('LOGGING_LEVEL'): from roundup import rlog config.logging = rlog.BasicLogging() config.logging.setLevel(os.environ['LOGGING_LEVEL']) - config.logging.getLogger('hyperdb').setFormat('%(message)s') + config.logging.getLogger('roundup.hyperdb').setFormat('%(message)s') -class DBTest(MyTestCase): +class commonDBTest(MyTestCase): def setUp(self): # remove previous test, ignore errors if os.path.exists(config.DATABASE): shutil.rmtree(config.DATABASE) os.makedirs(config.DATABASE + '/files') - self.db = self.module.Database(config, 'admin') + self.open_database() setupSchema(self.db, 1, self.module) + def iterSetup(self, classname='issue'): + cls = getattr(self.db, classname) + def filt_iter(*args): + """ for checking equivalence of filter and filter_iter """ + return list(cls.filter_iter(*args)) + return self.assertEqual, cls.filter, filt_iter + + def 
filteringSetupTransitiveSearch(self, classname='issue'): + u_m = {} + k = 30 + for user in ( + {'username': 'ceo', 'age': 129}, + {'username': 'grouplead1', 'age': 29, 'supervisor': '3'}, + {'username': 'grouplead2', 'age': 29, 'supervisor': '3'}, + {'username': 'worker1', 'age': 25, 'supervisor' : '4'}, + {'username': 'worker2', 'age': 24, 'supervisor' : '4'}, + {'username': 'worker3', 'age': 23, 'supervisor' : '5'}, + {'username': 'worker4', 'age': 22, 'supervisor' : '5'}, + {'username': 'worker5', 'age': 21, 'supervisor' : '5'}): + u = self.db.user.create(**user) + u_m [u] = self.db.msg.create(author = u, content = ' ' + , date = date.Date ('2006-01-%s' % k)) + k -= 1 + i = date.Interval('-1d') + for issue in ( + {'title': 'ts1', 'status': '2', 'assignedto': '6', + 'priority': '3', 'messages' : [u_m ['6']], 'nosy' : ['4']}, + {'title': 'ts2', 'status': '1', 'assignedto': '6', + 'priority': '3', 'messages' : [u_m ['6']], 'nosy' : ['5']}, + {'title': 'ts4', 'status': '2', 'assignedto': '7', + 'priority': '3', 'messages' : [u_m ['7']]}, + {'title': 'ts5', 'status': '1', 'assignedto': '8', + 'priority': '3', 'messages' : [u_m ['8']]}, + {'title': 'ts6', 'status': '2', 'assignedto': '9', + 'priority': '3', 'messages' : [u_m ['9']]}, + {'title': 'ts7', 'status': '1', 'assignedto': '10', + 'priority': '3', 'messages' : [u_m ['10']]}, + {'title': 'ts8', 'status': '2', 'assignedto': '10', + 'priority': '3', 'messages' : [u_m ['10']], 'foo' : i}, + {'title': 'ts9', 'status': '1', 'assignedto': '10', + 'priority': '3', 'messages' : [u_m ['10'], u_m ['9']]}): + self.db.issue.create(**issue) + return self.iterSetup(classname) + + +class DBTest(commonDBTest): + def testRefresh(self): self.db.refresh_database() @@ -144,11 +200,7 @@ def testCreatorProperty(self): i = self.db.issue id1 = i.create(title='spam') - self.db.commit() - self.db.close() - self.db = self.module.Database(config, 'fred') - setupSchema(self.db, 0, self.module) - i = self.db.issue + self.db.journaltag = 
'fred' id2 = i.create(title='spam') self.assertNotEqual(id1, id2) self.assertNotEqual(i.get(id1, 'creator'), i.get(id2, 'creator')) @@ -156,11 +208,7 @@ def testActorProperty(self): i = self.db.issue id1 = i.create(title='spam') - self.db.commit() - self.db.close() - self.db = self.module.Database(config, 'fred') - setupSchema(self.db, 0, self.module) - i = self.db.issue + self.db.journaltag = 'fred' i.set(id1, title='asfasd') self.assertNotEqual(i.get(id1, 'creator'), i.get(id1, 'actor')) @@ -273,6 +321,23 @@ if commit: self.db.commit() self.assertEqual(self.db.issue.get(nid, "nosy"), []) + def testMakeSeveralMultilinkedNodes(self): + for commit in (0,1): + u1 = self.db.user.create(username='foo%s'%commit) + u2 = self.db.user.create(username='bar%s'%commit) + u3 = self.db.user.create(username='baz%s'%commit) + nid = self.db.issue.create(title="spam", nosy=[u1]) + if commit: self.db.commit() + self.assertEqual(self.db.issue.get(nid, "nosy"), [u1]) + self.db.issue.set(nid, deadline=date.Date('.')) + self.db.issue.set(nid, nosy=[u1,u2], title='ta%s'%commit) + if commit: self.db.commit() + self.assertEqual(self.db.issue.get(nid, "nosy"), [u1,u2]) + self.db.issue.set(nid, deadline=date.Date('.')) + self.db.issue.set(nid, nosy=[u1,u2,u3], title='tb%s'%commit) + if commit: self.db.commit() + self.assertEqual(self.db.issue.get(nid, "nosy"), [u1,u2,u3]) + def testMultilinkChangeIterable(self): for commit in (0,1): # invalid nosy value assertion @@ -344,11 +409,15 @@ '2008-02-29.00:00:00') self.assertEquals(self.db.issue.filter(None, {'deadline': '2008-02-29'}), [nid]) + self.assertEquals(list(self.db.issue.filter_iter(None, + {'deadline': '2008-02-29'})), [nid]) self.db.issue.set(nid, deadline=date.Date('2008-03-01')) self.assertEquals(str(self.db.issue.get(nid, 'deadline')), '2008-03-01.00:00:00') self.assertEquals(self.db.issue.filter(None, {'deadline': '2008-02-29'}), []) + self.assertEquals(list(self.db.issue.filter_iter(None, + {'deadline': '2008-02-29'})), []) def 
testDateUnset(self): for commit in (0,1): @@ -852,6 +921,7 @@ self.assertEquals(self.db.indexer.search([], self.db.issue), {}) self.assertEquals(self.db.indexer.search(['hello'], self.db.issue), {i1: {'files': [f1]}}) + # content='world' has the wrong content-type and shouldn't be indexed self.assertEquals(self.db.indexer.search(['world'], self.db.issue), {}) self.assertEquals(self.db.indexer.search(['frooz'], self.db.issue), {i2: {}}) @@ -960,45 +1030,17 @@ self.assertEquals(self.db.indexer.search(['flebble'], self.db.issue), {'1': {}}) - def testIndexingOnImport(self): - # import a message - msgcontent = 'Glrk' - msgid = self.db.msg.import_list(['content', 'files', 'recipients'], - [repr(msgcontent), '[]', '[]']) - msg_filename = self.db.filename(self.db.msg.classname, msgid, - create=1) - support.ensureParentsExist(msg_filename) - msg_file = open(msg_filename, 'w') - msg_file.write(msgcontent) - msg_file.close() - - # import a file - filecontent = 'Brrk' - fileid = self.db.file.import_list(['content'], [repr(filecontent)]) - file_filename = self.db.filename(self.db.file.classname, fileid, - create=1) - support.ensureParentsExist(file_filename) - file_file = open(file_filename, 'w') - file_file.write(filecontent) - file_file.close() - + def testIndexingPropertiesOnImport(self): # import an issue title = 'Bzzt' nodeid = self.db.issue.import_list(['title', 'messages', 'files', - 'spam', 'nosy', 'superseder'], [repr(title), repr([msgid]), - repr([fileid]), '[]', '[]', '[]']) + 'spam', 'nosy', 'superseder'], [repr(title), '[]', '[]', + '[]', '[]', '[]']) self.db.commit() # Content of title attribute is indexed self.assertEquals(self.db.indexer.search([title], self.db.issue), {str(nodeid):{}}) - # Content of message is indexed - self.assertEquals(self.db.indexer.search([msgcontent], self.db.issue), - {str(nodeid):{'messages':[str(msgid)]}}) - # Content of file is indexed - self.assertEquals(self.db.indexer.search([filecontent], self.db.issue), - 
{str(nodeid):{'files':[str(fileid)]}}) - # @@ -1113,13 +1155,12 @@ self.db.issue.retire(ids[0]) self.assertEqual(len(self.db.issue.stringFind(title='spam')), 1) - def filteringSetup(self): + def filteringSetup(self, classname='issue'): for user in ( - {'username': 'bleep', 'age': 1}, - {'username': 'blop', 'age': 1.5}, - {'username': 'blorp', 'age': 2}): + {'username': 'bleep', 'age': 1, 'assignable': True}, + {'username': 'blop', 'age': 1.5, 'assignable': True}, + {'username': 'blorp', 'age': 2, 'assignable': False}): self.db.user.create(**user) - iss = self.db.issue file_content = ''.join([chr(i) for i in range(255)]) f = self.db.file.create(content=file_content) for issue in ( @@ -1137,92 +1178,133 @@ 'files': [f]}): self.db.issue.create(**issue) self.db.commit() - return self.assertEqual, self.db.issue.filter + return self.iterSetup(classname) def testFilteringID(self): - ae, filt = self.filteringSetup() - ae(filt(None, {'id': '1'}, ('+','id'), (None,None)), ['1']) - ae(filt(None, {'id': '2'}, ('+','id'), (None,None)), ['2']) - ae(filt(None, {'id': '100'}, ('+','id'), (None,None)), []) + ae, filter, filter_iter = self.filteringSetup() + for filt in filter, filter_iter: + ae(filt(None, {'id': '1'}, ('+','id'), (None,None)), ['1']) + ae(filt(None, {'id': '2'}, ('+','id'), (None,None)), ['2']) + ae(filt(None, {'id': '100'}, ('+','id'), (None,None)), []) + + def testFilteringBoolean(self): + ae, filter, filter_iter = self.filteringSetup('user') + a = 'assignable' + for filt in filter, filter_iter: + ae(filt(None, {a: '1'}, ('+','id'), (None,None)), ['3','4']) + ae(filt(None, {a: '0'}, ('+','id'), (None,None)), ['5']) + ae(filt(None, {a: ['1']}, ('+','id'), (None,None)), ['3','4']) + ae(filt(None, {a: ['0']}, ('+','id'), (None,None)), ['5']) + ae(filt(None, {a: ['0','1']}, ('+','id'), (None,None)), + ['3','4','5']) + ae(filt(None, {a: 'True'}, ('+','id'), (None,None)), ['3','4']) + ae(filt(None, {a: 'False'}, ('+','id'), (None,None)), ['5']) + ae(filt(None, {a: 
['True']}, ('+','id'), (None,None)), ['3','4']) + ae(filt(None, {a: ['False']}, ('+','id'), (None,None)), ['5']) + ae(filt(None, {a: ['False','True']}, ('+','id'), (None,None)), + ['3','4','5']) + ae(filt(None, {a: True}, ('+','id'), (None,None)), ['3','4']) + ae(filt(None, {a: False}, ('+','id'), (None,None)), ['5']) + ae(filt(None, {a: 1}, ('+','id'), (None,None)), ['3','4']) + ae(filt(None, {a: 0}, ('+','id'), (None,None)), ['5']) + ae(filt(None, {a: [1]}, ('+','id'), (None,None)), ['3','4']) + ae(filt(None, {a: [0]}, ('+','id'), (None,None)), ['5']) + ae(filt(None, {a: [0,1]}, ('+','id'), (None,None)), ['3','4','5']) + ae(filt(None, {a: [True]}, ('+','id'), (None,None)), ['3','4']) + ae(filt(None, {a: [False]}, ('+','id'), (None,None)), ['5']) + ae(filt(None, {a: [False,True]}, ('+','id'), (None,None)), + ['3','4','5']) def testFilteringNumber(self): - self.filteringSetup() - ae, filt = self.assertEqual, self.db.user.filter - ae(filt(None, {'age': '1'}, ('+','id'), (None,None)), ['3']) - ae(filt(None, {'age': '1.5'}, ('+','id'), (None,None)), ['4']) - ae(filt(None, {'age': '2'}, ('+','id'), (None,None)), ['5']) - ae(filt(None, {'age': ['1','2']}, ('+','id'), (None,None)), ['3','5']) + ae, filter, filter_iter = self.filteringSetup('user') + for filt in filter, filter_iter: + ae(filt(None, {'age': '1'}, ('+','id'), (None,None)), ['3']) + ae(filt(None, {'age': '1.5'}, ('+','id'), (None,None)), ['4']) + ae(filt(None, {'age': '2'}, ('+','id'), (None,None)), ['5']) + ae(filt(None, {'age': ['1','2']}, ('+','id'), (None,None)), + ['3','5']) + ae(filt(None, {'age': 2}, ('+','id'), (None,None)), ['5']) + ae(filt(None, {'age': [1,2]}, ('+','id'), (None,None)), ['3','5']) def testFilteringString(self): - ae, filt = self.filteringSetup() - ae(filt(None, {'title': ['one']}, ('+','id'), (None,None)), ['1']) - ae(filt(None, {'title': ['issue one']}, ('+','id'), (None,None)), - ['1']) - ae(filt(None, {'title': ['issue', 'one']}, ('+','id'), (None,None)), - ['1']) - 
ae(filt(None, {'title': ['issue']}, ('+','id'), (None,None)), - ['1','2','3']) - ae(filt(None, {'title': ['one', 'two']}, ('+','id'), (None,None)), - []) + ae, filter, filter_iter = self.filteringSetup() + for filt in filter, filter_iter: + ae(filt(None, {'title': ['one']}, ('+','id'), (None,None)), ['1']) + ae(filt(None, {'title': ['issue one']}, ('+','id'), (None,None)), + ['1']) + ae(filt(None, {'title': ['issue', 'one']}, ('+','id'), (None,None)), + ['1']) + ae(filt(None, {'title': ['issue']}, ('+','id'), (None,None)), + ['1','2','3']) + ae(filt(None, {'title': ['one', 'two']}, ('+','id'), (None,None)), + []) def testFilteringLink(self): - ae, filt = self.filteringSetup() - ae(filt(None, {'status': '1'}, ('+','id'), (None,None)), ['2','3']) - ae(filt(None, {'assignedto': '-1'}, ('+','id'), (None,None)), ['3','4']) - ae(filt(None, {'assignedto': None}, ('+','id'), (None,None)), ['3','4']) - ae(filt(None, {'assignedto': [None]}, ('+','id'), (None,None)), - ['3','4']) - ae(filt(None, {'assignedto': ['-1', None]}, ('+','id'), (None,None)), - ['3','4']) - ae(filt(None, {'assignedto': ['1', None]}, ('+','id'), (None,None)), - ['1', '3','4']) + ae, filter, filter_iter = self.filteringSetup() + a = 'assignedto' + grp = (None, None) + for filt in filter, filter_iter: + ae(filt(None, {'status': '1'}, ('+','id'), grp), ['2','3']) + ae(filt(None, {a: '-1'}, ('+','id'), grp), ['3','4']) + ae(filt(None, {a: None}, ('+','id'), grp), ['3','4']) + ae(filt(None, {a: [None]}, ('+','id'), grp), ['3','4']) + ae(filt(None, {a: ['-1', None]}, ('+','id'), grp), ['3','4']) + ae(filt(None, {a: ['1', None]}, ('+','id'), grp), ['1', '3','4']) def testFilteringMultilinkAndGroup(self): """testFilteringMultilinkAndGroup: See roundup Bug 1541128: apparently grouping by something and searching a Multilink failed with MySQL 5.0 """ - ae, filt = self.filteringSetup() - ae(filt(None, {'files': '1'}, ('-','activity'), ('+','status')), ['4']) + ae, filter, filter_iter = self.filteringSetup() + for 
f in filter, filter_iter: + ae(f(None, {'files': '1'}, ('-','activity'), ('+','status')), ['4']) def testFilteringRetired(self): - ae, filt = self.filteringSetup() + ae, filter, filter_iter = self.filteringSetup() self.db.issue.retire('2') - ae(filt(None, {'status': '1'}, ('+','id'), (None,None)), ['3']) + for f in filter, filter_iter: + ae(f(None, {'status': '1'}, ('+','id'), (None,None)), ['3']) def testFilteringMultilink(self): - ae, filt = self.filteringSetup() - ae(filt(None, {'nosy': '3'}, ('+','id'), (None,None)), ['4']) - ae(filt(None, {'nosy': '-1'}, ('+','id'), (None,None)), ['1', '2']) - ae(filt(None, {'nosy': ['1','2']}, ('+', 'status'), - ('-', 'deadline')), ['4', '3']) + ae, filter, filter_iter = self.filteringSetup() + for filt in filter, filter_iter: + ae(filt(None, {'nosy': '3'}, ('+','id'), (None,None)), ['4']) + ae(filt(None, {'nosy': '-1'}, ('+','id'), (None,None)), ['1', '2']) + ae(filt(None, {'nosy': ['1','2']}, ('+', 'status'), + ('-', 'deadline')), ['4', '3']) def testFilteringMany(self): - ae, filt = self.filteringSetup() - ae(filt(None, {'nosy': '2', 'status': '1'}, ('+','id'), (None,None)), - ['3']) + ae, filter, filter_iter = self.filteringSetup() + for f in filter, filter_iter: + ae(f(None, {'nosy': '2', 'status': '1'}, ('+','id'), (None,None)), + ['3']) def testFilteringRangeBasic(self): - ae, filt = self.filteringSetup() - ae(filt(None, {'deadline': 'from 2003-02-10 to 2003-02-23'}), ['1','3']) - ae(filt(None, {'deadline': '2003-02-10; 2003-02-23'}), ['1','3']) - ae(filt(None, {'deadline': '; 2003-02-16'}), ['2']) + ae, filter, filter_iter = self.filteringSetup() + d = 'deadline' + for f in filter, filter_iter: + ae(f(None, {d: 'from 2003-02-10 to 2003-02-23'}), ['1','3']) + ae(f(None, {d: '2003-02-10; 2003-02-23'}), ['1','3']) + ae(f(None, {d: '; 2003-02-16'}), ['2']) def testFilteringRangeTwoSyntaxes(self): - ae, filt = self.filteringSetup() - ae(filt(None, {'deadline': 'from 2003-02-16'}), ['1', '3', '4']) - ae(filt(None, 
{'deadline': '2003-02-16;'}), ['1', '3', '4']) + ae, filter, filter_iter = self.filteringSetup() + for filt in filter, filter_iter: + ae(filt(None, {'deadline': 'from 2003-02-16'}), ['1', '3', '4']) + ae(filt(None, {'deadline': '2003-02-16;'}), ['1', '3', '4']) def testFilteringRangeYearMonthDay(self): - ae, filt = self.filteringSetup() - ae(filt(None, {'deadline': '2002'}), []) - ae(filt(None, {'deadline': '2003'}), ['1', '2', '3']) - ae(filt(None, {'deadline': '2004'}), ['4']) - ae(filt(None, {'deadline': '2003-02-16'}), ['1']) - ae(filt(None, {'deadline': '2003-02-17'}), []) + ae, filter, filter_iter = self.filteringSetup() + for filt in filter, filter_iter: + ae(filt(None, {'deadline': '2002'}), []) + ae(filt(None, {'deadline': '2003'}), ['1', '2', '3']) + ae(filt(None, {'deadline': '2004'}), ['4']) + ae(filt(None, {'deadline': '2003-02-16'}), ['1']) + ae(filt(None, {'deadline': '2003-02-17'}), []) def testFilteringRangeMonths(self): - ae, filt = self.filteringSetup() + ae, filter, filter_iter = self.filteringSetup() for month in range(1, 13): for n in range(1, month+1): i = self.db.issue.create(title='%d.%d'%(month, n), @@ -1230,55 +1312,61 @@ self.db.commit() for month in range(1, 13): - r = filt(None, dict(deadline='2001-%02d'%month)) - assert len(r) == month, 'month %d != length %d'%(month, len(r)) + for filt in filter, filter_iter: + r = filt(None, dict(deadline='2001-%02d'%month)) + assert len(r) == month, 'month %d != length %d'%(month, len(r)) def testFilteringRangeInterval(self): - ae, filt = self.filteringSetup() - ae(filt(None, {'foo': 'from 0:50 to 2:00'}), ['1']) - ae(filt(None, {'foo': 'from 0:50 to 1d 2:00'}), ['1', '2']) - ae(filt(None, {'foo': 'from 5:50'}), ['2']) - ae(filt(None, {'foo': 'to 0:05'}), []) + ae, filter, filter_iter = self.filteringSetup() + for filt in filter, filter_iter: + ae(filt(None, {'foo': 'from 0:50 to 2:00'}), ['1']) + ae(filt(None, {'foo': 'from 0:50 to 1d 2:00'}), ['1', '2']) + ae(filt(None, {'foo': 'from 5:50'}), 
['2']) + ae(filt(None, {'foo': 'to 0:05'}), []) def testFilteringRangeGeekInterval(self): - ae, filt = self.filteringSetup() + ae, filter, filter_iter = self.filteringSetup() for issue in ( { 'deadline': date.Date('. -2d')}, { 'deadline': date.Date('. -1d')}, { 'deadline': date.Date('. -8d')}, ): self.db.issue.create(**issue) - ae(filt(None, {'deadline': '-2d;'}), ['5', '6']) - ae(filt(None, {'deadline': '-1d;'}), ['6']) - ae(filt(None, {'deadline': '-1w;'}), ['5', '6']) + for filt in filter, filter_iter: + ae(filt(None, {'deadline': '-2d;'}), ['5', '6']) + ae(filt(None, {'deadline': '-1d;'}), ['6']) + ae(filt(None, {'deadline': '-1w;'}), ['5', '6']) def testFilteringIntervalSort(self): # 1: '1:10' # 2: '1d' # 3: None # 4: '0:10' - ae, filt = self.filteringSetup() - # ascending should sort None, 1:10, 1d - ae(filt(None, {}, ('+','foo'), (None,None)), ['3', '4', '1', '2']) - # descending should sort 1d, 1:10, None - ae(filt(None, {}, ('-','foo'), (None,None)), ['2', '1', '4', '3']) + ae, filter, filter_iter = self.filteringSetup() + for filt in filter, filter_iter: + # ascending should sort None, 1:10, 1d + ae(filt(None, {}, ('+','foo'), (None,None)), ['3', '4', '1', '2']) + # descending should sort 1d, 1:10, None + ae(filt(None, {}, ('-','foo'), (None,None)), ['2', '1', '4', '3']) def testFilteringStringSort(self): # 1: 'issue one' # 2: 'issue two' # 3: 'issue three' # 4: 'non four' - ae, filt = self.filteringSetup() - ae(filt(None, {}, ('+','title')), ['1', '3', '2', '4']) - ae(filt(None, {}, ('-','title')), ['4', '2', '3', '1']) + ae, filter, filter_iter = self.filteringSetup() + for filt in filter, filter_iter: + ae(filt(None, {}, ('+','title')), ['1', '3', '2', '4']) + ae(filt(None, {}, ('-','title')), ['4', '2', '3', '1']) # Test string case: For now allow both, w/wo case matching. 
# 1: 'issue one' # 2: 'issue two' # 3: 'Issue three' # 4: 'non four' self.db.issue.set('3', title='Issue three') - ae(filt(None, {}, ('+','title')), ['1', '3', '2', '4']) - ae(filt(None, {}, ('-','title')), ['4', '2', '3', '1']) + for filt in filter, filter_iter: + ae(filt(None, {}, ('+','title')), ['1', '3', '2', '4']) + ae(filt(None, {}, ('-','title')), ['4', '2', '3', '1']) # Obscure bug in anydbm backend trying to convert to number # 1: '1st issue' # 2: '2' @@ -1286,8 +1374,9 @@ # 4: 'non four' self.db.issue.set('1', title='1st issue') self.db.issue.set('2', title='2') - ae(filt(None, {}, ('+','title')), ['1', '2', '3', '4']) - ae(filt(None, {}, ('-','title')), ['4', '3', '2', '1']) + for filt in filter, filter_iter: + ae(filt(None, {}, ('+','title')), ['1', '2', '3', '4']) + ae(filt(None, {}, ('-','title')), ['4', '3', '2', '1']) def testFilteringMultilinkSort(self): # 1: [] Reverse: 1: [] @@ -1297,7 +1386,9 @@ # Note the sort order for the multilink doen't change when # reversing the sort direction due to the re-sorting of the # multilink! - ae, filt = self.filteringSetup() + # Note that we don't test filter_iter here, Multilink sort-order + # isn't defined for that. + ae, filt, dummy = self.filteringSetup() ae(filt(None, {}, ('+','nosy'), (None,None)), ['1', '2', '4', '3']) ae(filt(None, {}, ('-','nosy'), (None,None)), ['4', '3', '1', '2']) @@ -1306,7 +1397,9 @@ # 2: status: 1 "unread" nosy: [] # 3: status: 1 "unread" nosy: ['admin','fred'] # 4: status: 3 "testing" nosy: ['admin','bleep','fred'] - ae, filt = self.filteringSetup() + # Note that we don't test filter_iter here, Multilink sort-order + # isn't defined for that. 
+ ae, filt, dummy = self.filteringSetup() ae(filt(None, {}, ('+','nosy'), ('+','status')), ['1', '4', '2', '3']) ae(filt(None, {}, ('-','nosy'), ('+','status')), ['1', '4', '3', '2']) ae(filt(None, {}, ('+','nosy'), ('-','status')), ['2', '3', '4', '1']) @@ -1321,228 +1414,202 @@ # 2: status: 1 -> 'u', priority: 3 -> 1 # 3: status: 1 -> 'u', priority: 2 -> 3 # 4: status: 3 -> 't', priority: 2 -> 3 - ae, filt = self.filteringSetup() - ae(filt(None, {}, ('+','status'), ('+','priority')), - ['1', '2', '4', '3']) - ae(filt(None, {'priority':'2'}, ('+','status'), ('+','priority')), - ['4', '3']) - ae(filt(None, {'priority.order':'3'}, ('+','status'), ('+','priority')), - ['4', '3']) - ae(filt(None, {'priority':['2','3']}, ('+','priority'), ('+','status')), - ['1', '4', '2', '3']) - ae(filt(None, {}, ('+','priority'), ('+','status')), - ['1', '4', '2', '3']) + ae, filter, filter_iter = self.filteringSetup() + for filt in filter, filter_iter: + ae(filt(None, {}, ('+','status'), ('+','priority')), + ['1', '2', '4', '3']) + ae(filt(None, {'priority':'2'}, ('+','status'), ('+','priority')), + ['4', '3']) + ae(filt(None, {'priority.order':'3'}, ('+','status'), + ('+','priority')), ['4', '3']) + ae(filt(None, {'priority':['2','3']}, ('+','priority'), + ('+','status')), ['1', '4', '2', '3']) + ae(filt(None, {}, ('+','priority'), ('+','status')), + ['1', '4', '2', '3']) def testFilteringDateSort(self): # '1': '2003-02-16.22:50' # '2': '2003-01-01.00:00' # '3': '2003-02-18' # '4': '2004-03-08' - ae, filt = self.filteringSetup() - # ascending - ae(filt(None, {}, ('+','deadline'), (None,None)), ['2', '1', '3', '4']) - # descending - ae(filt(None, {}, ('-','deadline'), (None,None)), ['4', '3', '1', '2']) + ae, filter, filter_iter = self.filteringSetup() + for f in filter, filter_iter: + # ascending + ae(f(None, {}, ('+','deadline'), (None,None)), ['2', '1', '3', '4']) + # descending + ae(f(None, {}, ('-','deadline'), (None,None)), ['4', '3', '1', '2']) def 
testFilteringDateSortPriorityGroup(self): # '1': '2003-02-16.22:50' 1 => 2 # '2': '2003-01-01.00:00' 3 => 1 # '3': '2003-02-18' 2 => 3 # '4': '2004-03-08' 1 => 2 - ae, filt = self.filteringSetup() + ae, filter, filter_iter = self.filteringSetup() - # ascending - ae(filt(None, {}, ('+','deadline'), ('+','priority')), - ['2', '1', '3', '4']) - ae(filt(None, {}, ('-','deadline'), ('+','priority')), - ['1', '2', '4', '3']) - # descending - ae(filt(None, {}, ('+','deadline'), ('-','priority')), - ['3', '4', '2', '1']) - ae(filt(None, {}, ('-','deadline'), ('-','priority')), - ['4', '3', '1', '2']) - - def filteringSetupTransitiveSearch(self): - u_m = {} - k = 30 - for user in ( - {'username': 'ceo', 'age': 129}, - {'username': 'grouplead1', 'age': 29, 'supervisor': '3'}, - {'username': 'grouplead2', 'age': 29, 'supervisor': '3'}, - {'username': 'worker1', 'age': 25, 'supervisor' : '4'}, - {'username': 'worker2', 'age': 24, 'supervisor' : '4'}, - {'username': 'worker3', 'age': 23, 'supervisor' : '5'}, - {'username': 'worker4', 'age': 22, 'supervisor' : '5'}, - {'username': 'worker5', 'age': 21, 'supervisor' : '5'}): - u = self.db.user.create(**user) - u_m [u] = self.db.msg.create(author = u, content = ' ' - , date = date.Date ('2006-01-%s' % k)) - k -= 1 - iss = self.db.issue - for issue in ( - {'title': 'ts1', 'status': '2', 'assignedto': '6', - 'priority': '3', 'messages' : [u_m ['6']], 'nosy' : ['4']}, - {'title': 'ts2', 'status': '1', 'assignedto': '6', - 'priority': '3', 'messages' : [u_m ['6']], 'nosy' : ['5']}, - {'title': 'ts4', 'status': '2', 'assignedto': '7', - 'priority': '3', 'messages' : [u_m ['7']]}, - {'title': 'ts5', 'status': '1', 'assignedto': '8', - 'priority': '3', 'messages' : [u_m ['8']]}, - {'title': 'ts6', 'status': '2', 'assignedto': '9', - 'priority': '3', 'messages' : [u_m ['9']]}, - {'title': 'ts7', 'status': '1', 'assignedto': '10', - 'priority': '3', 'messages' : [u_m ['10']]}, - {'title': 'ts8', 'status': '2', 'assignedto': '10', - 
'priority': '3', 'messages' : [u_m ['10']]}, - {'title': 'ts9', 'status': '1', 'assignedto': '10', - 'priority': '3', 'messages' : [u_m ['10'], u_m ['9']]}): - self.db.issue.create(**issue) - return self.assertEqual, self.db.issue.filter + for filt in filter, filter_iter: + # ascending + ae(filt(None, {}, ('+','deadline'), ('+','priority')), + ['2', '1', '3', '4']) + ae(filt(None, {}, ('-','deadline'), ('+','priority')), + ['1', '2', '4', '3']) + # descending + ae(filt(None, {}, ('+','deadline'), ('-','priority')), + ['3', '4', '2', '1']) + ae(filt(None, {}, ('-','deadline'), ('-','priority')), + ['4', '3', '1', '2']) def testFilteringTransitiveLinkUser(self): - ae, filt = self.filteringSetupTransitiveSearch() - ufilt = self.db.user.filter - ae(ufilt(None, {'supervisor.username': 'ceo'}, ('+','username')), - ['4', '5']) - ae(ufilt(None, {'supervisor.supervisor.username': 'ceo'}, - ('+','username')), ['6', '7', '8', '9', '10']) - ae(ufilt(None, {'supervisor.supervisor': '3'}, ('+','username')), - ['6', '7', '8', '9', '10']) - ae(ufilt(None, {'supervisor.supervisor.id': '3'}, ('+','username')), - ['6', '7', '8', '9', '10']) - ae(ufilt(None, {'supervisor.username': 'grouplead1'}, ('+','username')), - ['6', '7']) - ae(ufilt(None, {'supervisor.username': 'grouplead2'}, ('+','username')), - ['8', '9', '10']) - ae(ufilt(None, {'supervisor.username': 'grouplead2', - 'supervisor.supervisor.username': 'ceo'}, ('+','username')), - ['8', '9', '10']) - ae(ufilt(None, {'supervisor.supervisor': '3', 'supervisor': '4'}, - ('+','username')), ['6', '7']) + ae, filter, filter_iter = self.filteringSetupTransitiveSearch('user') + for f in filter, filter_iter: + ae(f(None, {'supervisor.username': 'ceo'}, ('+','username')), + ['4', '5']) + ae(f(None, {'supervisor.supervisor.username': 'ceo'}, + ('+','username')), ['6', '7', '8', '9', '10']) + ae(f(None, {'supervisor.supervisor': '3'}, ('+','username')), + ['6', '7', '8', '9', '10']) + ae(f(None, {'supervisor.supervisor.id': '3'}, 
('+','username')), + ['6', '7', '8', '9', '10']) + ae(f(None, {'supervisor.username': 'grouplead1'}, ('+','username')), + ['6', '7']) + ae(f(None, {'supervisor.username': 'grouplead2'}, ('+','username')), + ['8', '9', '10']) + ae(f(None, {'supervisor.username': 'grouplead2', + 'supervisor.supervisor.username': 'ceo'}, ('+','username')), + ['8', '9', '10']) + ae(f(None, {'supervisor.supervisor': '3', 'supervisor': '4'}, + ('+','username')), ['6', '7']) def testFilteringTransitiveLinkSort(self): - ae, filt = self.filteringSetupTransitiveSearch() - ufilt = self.db.user.filter + ae, filter, filter_iter = self.filteringSetupTransitiveSearch() + ae, ufilter, ufilter_iter = self.iterSetup('user') # Need to make ceo his own (and first two users') supervisor, # otherwise we will depend on sorting order of NULL values. # Leave that to a separate test. self.db.user.set('1', supervisor = '3') self.db.user.set('2', supervisor = '3') self.db.user.set('3', supervisor = '3') - ae(ufilt(None, {'supervisor':'3'}, []), ['1', '2', '3', '4', '5']) - ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'), - ('+','supervisor.supervisor'), ('+','supervisor'), - ('+','username')]), - ['1', '3', '2', '4', '5', '6', '7', '8', '9', '10']) - ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'), - ('-','supervisor.supervisor'), ('-','supervisor'), - ('+','username')]), - ['8', '9', '10', '6', '7', '1', '3', '2', '4', '5']) - ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'), - ('+','assignedto.supervisor.supervisor'), - ('+','assignedto.supervisor'), ('+','assignedto')]), - ['1', '2', '3', '4', '5', '6', '7', '8']) - ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'), - ('+','assignedto.supervisor.supervisor'), - ('-','assignedto.supervisor'), ('+','assignedto')]), - ['4', '5', '6', '7', '8', '1', '2', '3']) - ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'), - ('+','assignedto.supervisor.supervisor'), - 
('+','assignedto.supervisor'), ('+','assignedto'), - ('-','status')]), - ['2', '1', '3', '4', '5', '6', '8', '7']) - ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'), - ('+','assignedto.supervisor.supervisor'), - ('+','assignedto.supervisor'), ('+','assignedto'), - ('+','status')]), - ['1', '2', '3', '4', '5', '7', '6', '8']) - ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'), - ('+','assignedto.supervisor.supervisor'), - ('-','assignedto.supervisor'), ('+','assignedto'), ('+','status')]), - ['4', '5', '7', '6', '8', '1', '2', '3']) - ae(filt(None, {'assignedto':['6','7','8','9','10']}, - [('+','assignedto.supervisor.supervisor.supervisor'), - ('+','assignedto.supervisor.supervisor'), - ('-','assignedto.supervisor'), ('+','assignedto'), ('+','status')]), - ['4', '5', '7', '6', '8', '1', '2', '3']) - ae(filt(None, {'assignedto':['6','7','8','9']}, - [('+','assignedto.supervisor.supervisor.supervisor'), - ('+','assignedto.supervisor.supervisor'), - ('-','assignedto.supervisor'), ('+','assignedto'), ('+','status')]), - ['4', '5', '1', '2', '3']) + for ufilt in ufilter, ufilter_iter: + ae(ufilt(None, {'supervisor':'3'}, []), ['1', '2', '3', '4', '5']) + ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'), + ('+','supervisor.supervisor'), ('+','supervisor'), + ('+','username')]), + ['1', '3', '2', '4', '5', '6', '7', '8', '9', '10']) + ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'), + ('-','supervisor.supervisor'), ('-','supervisor'), + ('+','username')]), + ['8', '9', '10', '6', '7', '1', '3', '2', '4', '5']) + for f in filter, filter_iter: + ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'), + ('+','assignedto.supervisor.supervisor'), + ('+','assignedto.supervisor'), ('+','assignedto')]), + ['1', '2', '3', '4', '5', '6', '7', '8']) + ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'), + ('+','assignedto.supervisor.supervisor'), + ('-','assignedto.supervisor'), 
('+','assignedto')]), + ['4', '5', '6', '7', '8', '1', '2', '3']) + ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'), + ('+','assignedto.supervisor.supervisor'), + ('+','assignedto.supervisor'), ('+','assignedto'), + ('-','status')]), + ['2', '1', '3', '4', '5', '6', '8', '7']) + ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'), + ('+','assignedto.supervisor.supervisor'), + ('+','assignedto.supervisor'), ('+','assignedto'), + ('+','status')]), + ['1', '2', '3', '4', '5', '7', '6', '8']) + ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'), + ('+','assignedto.supervisor.supervisor'), + ('-','assignedto.supervisor'), ('+','assignedto'), + ('+','status')]), ['4', '5', '7', '6', '8', '1', '2', '3']) + ae(f(None, {'assignedto':['6','7','8','9','10']}, + [('+','assignedto.supervisor.supervisor.supervisor'), + ('+','assignedto.supervisor.supervisor'), + ('-','assignedto.supervisor'), ('+','assignedto'), + ('+','status')]), ['4', '5', '7', '6', '8', '1', '2', '3']) + ae(f(None, {'assignedto':['6','7','8','9']}, + [('+','assignedto.supervisor.supervisor.supervisor'), + ('+','assignedto.supervisor.supervisor'), + ('-','assignedto.supervisor'), ('+','assignedto'), + ('+','status')]), ['4', '5', '1', '2', '3']) def testFilteringTransitiveLinkSortNull(self): """Check sorting of NULL values""" - ae, filt = self.filteringSetupTransitiveSearch() - ufilt = self.db.user.filter - ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'), - ('+','supervisor.supervisor'), ('+','supervisor'), - ('+','username')]), - ['1', '3', '2', '4', '5', '6', '7', '8', '9', '10']) - ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'), - ('-','supervisor.supervisor'), ('-','supervisor'), - ('+','username')]), - ['8', '9', '10', '6', '7', '4', '5', '1', '3', '2']) - ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'), - ('+','assignedto.supervisor.supervisor'), - ('+','assignedto.supervisor'), ('+','assignedto')]), - 
['1', '2', '3', '4', '5', '6', '7', '8']) - ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'), - ('+','assignedto.supervisor.supervisor'), - ('-','assignedto.supervisor'), ('+','assignedto')]), - ['4', '5', '6', '7', '8', '1', '2', '3']) + ae, filter, filter_iter = self.filteringSetupTransitiveSearch() + ae, ufilter, ufilter_iter = self.iterSetup('user') + for ufilt in ufilter, ufilter_iter: + ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'), + ('+','supervisor.supervisor'), ('+','supervisor'), + ('+','username')]), + ['1', '3', '2', '4', '5', '6', '7', '8', '9', '10']) + ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'), + ('-','supervisor.supervisor'), ('-','supervisor'), + ('+','username')]), + ['8', '9', '10', '6', '7', '4', '5', '1', '3', '2']) + for f in filter, filter_iter: + ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'), + ('+','assignedto.supervisor.supervisor'), + ('+','assignedto.supervisor'), ('+','assignedto')]), + ['1', '2', '3', '4', '5', '6', '7', '8']) + ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'), + ('+','assignedto.supervisor.supervisor'), + ('-','assignedto.supervisor'), ('+','assignedto')]), + ['4', '5', '6', '7', '8', '1', '2', '3']) def testFilteringTransitiveLinkIssue(self): - ae, filt = self.filteringSetupTransitiveSearch() - ae(filt(None, {'assignedto.supervisor.username': 'grouplead1'}, - ('+','id')), ['1', '2', '3']) - ae(filt(None, {'assignedto.supervisor.username': 'grouplead2'}, - ('+','id')), ['4', '5', '6', '7', '8']) - ae(filt(None, {'assignedto.supervisor.username': 'grouplead2', - 'status': '1'}, ('+','id')), ['4', '6', '8']) - ae(filt(None, {'assignedto.supervisor.username': 'grouplead2', - 'status': '2'}, ('+','id')), ['5', '7']) - ae(filt(None, {'assignedto.supervisor.username': ['grouplead2'], - 'status': '2'}, ('+','id')), ['5', '7']) - ae(filt(None, {'assignedto.supervisor': ['4', '5'], 'status': '2'}, - ('+','id')), ['1', '3', '5', 
'7']) + ae, filter, filter_iter = self.filteringSetupTransitiveSearch() + for filt in filter, filter_iter: + ae(filt(None, {'assignedto.supervisor.username': 'grouplead1'}, + ('+','id')), ['1', '2', '3']) + ae(filt(None, {'assignedto.supervisor.username': 'grouplead2'}, + ('+','id')), ['4', '5', '6', '7', '8']) + ae(filt(None, {'assignedto.supervisor.username': 'grouplead2', + 'status': '1'}, ('+','id')), ['4', '6', '8']) + ae(filt(None, {'assignedto.supervisor.username': 'grouplead2', + 'status': '2'}, ('+','id')), ['5', '7']) + ae(filt(None, {'assignedto.supervisor.username': ['grouplead2'], + 'status': '2'}, ('+','id')), ['5', '7']) + ae(filt(None, {'assignedto.supervisor': ['4', '5'], 'status': '2'}, + ('+','id')), ['1', '3', '5', '7']) def testFilteringTransitiveMultilink(self): - ae, filt = self.filteringSetupTransitiveSearch() - ae(filt(None, {'messages.author.username': 'grouplead1'}, - ('+','id')), []) - ae(filt(None, {'messages.author': '6'}, - ('+','id')), ['1', '2']) - ae(filt(None, {'messages.author.id': '6'}, - ('+','id')), ['1', '2']) - ae(filt(None, {'messages.author.username': 'worker1'}, - ('+','id')), ['1', '2']) - ae(filt(None, {'messages.author': '10'}, - ('+','id')), ['6', '7', '8']) - ae(filt(None, {'messages.author': '9'}, - ('+','id')), ['5', '8']) - ae(filt(None, {'messages.author': ['9', '10']}, - ('+','id')), ['5', '6', '7', '8']) - ae(filt(None, {'messages.author': ['8', '9']}, - ('+','id')), ['4', '5', '8']) - ae(filt(None, {'messages.author': ['8', '9'], 'status' : '1'}, - ('+','id')), ['4', '8']) - ae(filt(None, {'messages.author': ['8', '9'], 'status' : '2'}, - ('+','id')), ['5']) - ae(filt(None, {'messages.author': ['8', '9', '10'], - 'messages.date': '2006-01-22.21:00;2006-01-23'}, ('+','id')), - ['6', '7', '8']) - ae(filt(None, {'nosy.supervisor.username': 'ceo'}, - ('+','id')), ['1', '2']) - ae(filt(None, {'messages.author': ['6', '9']}, - ('+','id')), ['1', '2', '5', '8']) - ae(filt(None, {'messages': ['5', '7']}, - 
('+','id')), ['3', '5', '8']) - ae(filt(None, {'messages.author': ['6', '9'], 'messages': ['5', '7']}, - ('+','id')), ['5', '8']) + ae, filter, filter_iter = self.filteringSetupTransitiveSearch() + for filt in filter, filter_iter: + ae(filt(None, {'messages.author.username': 'grouplead1'}, + ('+','id')), []) + ae(filt(None, {'messages.author': '6'}, + ('+','id')), ['1', '2']) + ae(filt(None, {'messages.author.id': '6'}, + ('+','id')), ['1', '2']) + ae(filt(None, {'messages.author.username': 'worker1'}, + ('+','id')), ['1', '2']) + ae(filt(None, {'messages.author': '10'}, + ('+','id')), ['6', '7', '8']) + ae(filt(None, {'messages.author': '9'}, + ('+','id')), ['5', '8']) + ae(filt(None, {'messages.author': ['9', '10']}, + ('+','id')), ['5', '6', '7', '8']) + ae(filt(None, {'messages.author': ['8', '9']}, + ('+','id')), ['4', '5', '8']) + ae(filt(None, {'messages.author': ['8', '9'], 'status' : '1'}, + ('+','id')), ['4', '8']) + ae(filt(None, {'messages.author': ['8', '9'], 'status' : '2'}, + ('+','id')), ['5']) + ae(filt(None, {'messages.author': ['8', '9', '10'], + 'messages.date': '2006-01-22.21:00;2006-01-23'}, ('+','id')), + ['6', '7', '8']) + ae(filt(None, {'nosy.supervisor.username': 'ceo'}, + ('+','id')), ['1', '2']) + ae(filt(None, {'messages.author': ['6', '9']}, + ('+','id')), ['1', '2', '5', '8']) + ae(filt(None, {'messages': ['5', '7']}, + ('+','id')), ['3', '5', '8']) + ae(filt(None, {'messages.author': ['6', '9'], + 'messages': ['5', '7']}, ('+','id')), ['5', '8']) def testFilteringTransitiveMultilinkSort(self): - ae, filt = self.filteringSetupTransitiveSearch() + # Note that we don't test filter_iter here, Multilink sort-order + # isn't defined for that. 
+ ae, filt, dummy = self.filteringSetupTransitiveSearch() ae(filt(None, {}, [('+','messages.author')]), ['1', '2', '3', '4', '5', '8', '6', '7']) ae(filt(None, {}, [('-','messages.author')]), @@ -1607,9 +1674,10 @@ ['3', '1', '2', '6', '7', '5', '4', '8']) def testFilteringSortId(self): - ae, filt = self.filteringSetupTransitiveSearch() - ae(self.db.user.filter(None, {}, ('+','id')), - ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10']) + ae, filter, filter_iter = self.filteringSetupTransitiveSearch('user') + for filt in filter, filter_iter: + ae(filt(None, {}, ('+','id')), + ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10']) # XXX add sorting tests for other types @@ -1624,10 +1692,9 @@ self.db = self.module.Database(config, 'admin') setupSchema(self.db, 0, self.module) - def testImportExport(self): # use the filtering setup to create a bunch of items - ae, filt = self.filteringSetup() + ae, dummy1, dummy2 = self.filteringSetup() # Get some stuff into the journal for testing import/export of # journal data: self.db.user.set('4', password = password.Password('xyzzy')) @@ -1740,7 +1807,7 @@ import roundup.admin import csv # use the filtering setup to create a bunch of items - ae, filt = self.filteringSetup() + ae, dummy1, dummy2 = self.filteringSetup() # create large field self.db.priority.create(name = 'X' * 500) self.db.config.CSV_FIELD_SIZE = 400 @@ -1835,6 +1902,8 @@ """Creates one issue with two attachments, one smaller and one larger than the set max_attachment_size. 
""" + old_translate_ = roundupdb._ + roundupdb._ = i18n.get_translation(language='C').gettext db = self.db db.config.NOSY_MAX_ATTACHMENT_SIZE = 4096 res = dict(mail_to = None, mail_msg = None) @@ -1852,15 +1921,16 @@ db.issue.nosymessage(i, m, {}) mail_msg = str(res["mail_msg"]) self.assertEqual(res["mail_to"], ["fred at example.com"]) - self.failUnless("From: admin" in mail_msg) - self.failUnless("Subject: [issue1] spam" in mail_msg) - self.failUnless("New submission from admin" in mail_msg) - self.failUnless("one two" in mail_msg) - self.failIf("File 'test1.txt' not attached" in mail_msg) - self.failUnless(base64.encodestring("xxx").rstrip() in mail_msg) - self.failUnless("File 'test2.txt' not attached" in mail_msg) - self.failIf(base64.encodestring("yyy").rstrip() in mail_msg) + self.assert_("From: admin" in mail_msg) + self.assert_("Subject: [issue1] spam" in mail_msg) + self.assert_("New submission from admin" in mail_msg) + self.assert_("one two" in mail_msg) + self.assert_("File 'test1.txt' not attached" not in mail_msg) + self.assert_(base64.encodestring("xxx").rstrip() in mail_msg) + self.assert_("File 'test2.txt' not attached" in mail_msg) + self.assert_(base64.encodestring("yyy").rstrip() not in mail_msg) finally : + roundupdb._ = old_translate_ Mailer.smtp_send = backup class ROTest(MyTestCase): @@ -1894,7 +1964,7 @@ os.makedirs(config.DATABASE + '/files') def test_reservedProperties(self): - self.db = self.module.Database(config, 'admin') + self.open_database() self.assertRaises(ValueError, self.module.Class, self.db, "a", creation=String()) self.assertRaises(ValueError, self.module.Class, self.db, "a", @@ -1905,13 +1975,13 @@ actor=String()) def init_a(self): - self.db = self.module.Database(config, 'admin') + self.open_database() a = self.module.Class(self.db, "a", name=String()) a.setkey("name") self.db.post_init() def test_fileClassProps(self): - self.db = self.module.Database(config, 'admin') + self.open_database() a = 
self.module.FileClass(self.db, 'a') l = a.getprops().keys() l.sort() @@ -1919,7 +1989,7 @@ 'creation', 'type']) def init_ab(self): - self.db = self.module.Database(config, 'admin') + self.open_database() a = self.module.Class(self.db, "a", name=String()) a.setkey("name") b = self.module.Class(self.db, "b", name=String(), @@ -1957,7 +2027,7 @@ self.db.getjournal('b', bid) def init_amod(self): - self.db = self.module.Database(config, 'admin') + self.open_database() a = self.module.Class(self.db, "a", name=String(), newstr=String(), newint=Interval(), newnum=Number(), newbool=Boolean(), newdate=Date()) @@ -2001,7 +2071,7 @@ self.db.getjournal('a', aid2) def init_amodkey(self): - self.db = self.module.Database(config, 'admin') + self.open_database() a = self.module.Class(self.db, "a", name=String(), newstr=String()) a.setkey("newstr") b = self.module.Class(self.db, "b", name=String()) @@ -2044,7 +2114,7 @@ def init_amodml(self): - self.db = self.module.Database(config, 'admin') + self.open_database() a = self.module.Class(self.db, "a", name=String(), newml=Multilink('a')) a.setkey('name') @@ -2118,6 +2188,123 @@ self.assertEqual(self.db.sql_index_exists('_issue', '_issue_id_idx'), 1) self.assertEqual(self.db.sql_index_exists('_issue', '_issue_x_idx'), 0) +class FilterCacheTest(commonDBTest): + def testFilteringTransitiveLinkCache(self): + ae, filter, filter_iter = self.filteringSetupTransitiveSearch() + ae, ufilter, ufilter_iter = self.iterSetup('user') + # Need to make ceo his own (and first two users') supervisor + self.db.user.set('1', supervisor = '3') + self.db.user.set('2', supervisor = '3') + self.db.user.set('3', supervisor = '3') + # test bool value + self.db.user.set('4', assignable = True) + self.db.user.set('3', assignable = False) + filt = self.db.issue.filter_iter + ufilt = self.db.user.filter_iter + user_result = \ + { '1' : {'username': 'admin', 'assignable': None, + 'supervisor': '3', 'realname': None, 'roles': 'Admin', + 'creator': '1', 'age': None, 
'actor': '1', + 'address': None} + , '2' : {'username': 'fred', 'assignable': None, + 'supervisor': '3', 'realname': None, 'roles': 'User', + 'creator': '1', 'age': None, 'actor': '1', + 'address': 'fred at example.com'} + , '3' : {'username': 'ceo', 'assignable': False, + 'supervisor': '3', 'realname': None, 'roles': None, + 'creator': '1', 'age': 129.0, 'actor': '1', + 'address': None} + , '4' : {'username': 'grouplead1', 'assignable': True, + 'supervisor': '3', 'realname': None, 'roles': None, + 'creator': '1', 'age': 29.0, 'actor': '1', + 'address': None} + , '5' : {'username': 'grouplead2', 'assignable': None, + 'supervisor': '3', 'realname': None, 'roles': None, + 'creator': '1', 'age': 29.0, 'actor': '1', + 'address': None} + , '6' : {'username': 'worker1', 'assignable': None, + 'supervisor': '4', 'realname': None, 'roles': None, + 'creator': '1', 'age': 25.0, 'actor': '1', + 'address': None} + , '7' : {'username': 'worker2', 'assignable': None, + 'supervisor': '4', 'realname': None, 'roles': None, + 'creator': '1', 'age': 24.0, 'actor': '1', + 'address': None} + , '8' : {'username': 'worker3', 'assignable': None, + 'supervisor': '5', 'realname': None, 'roles': None, + 'creator': '1', 'age': 23.0, 'actor': '1', + 'address': None} + , '9' : {'username': 'worker4', 'assignable': None, + 'supervisor': '5', 'realname': None, 'roles': None, + 'creator': '1', 'age': 22.0, 'actor': '1', + 'address': None} + , '10' : {'username': 'worker5', 'assignable': None, + 'supervisor': '5', 'realname': None, 'roles': None, + 'creator': '1', 'age': 21.0, 'actor': '1', + 'address': None} + } + foo = date.Interval('-1d') + issue_result = \ + { '1' : {'title': 'ts1', 'status': '2', 'assignedto': '6', + 'priority': '3', 'messages' : ['4'], 'nosy' : ['4']} + , '2' : {'title': 'ts2', 'status': '1', 'assignedto': '6', + 'priority': '3', 'messages' : ['4'], 'nosy' : ['5']} + , '3' : {'title': 'ts4', 'status': '2', 'assignedto': '7', + 'priority': '3', 'messages' : ['5']} + , '4' : 
{'title': 'ts5', 'status': '1', 'assignedto': '8', + 'priority': '3', 'messages' : ['6']} + , '5' : {'title': 'ts6', 'status': '2', 'assignedto': '9', + 'priority': '3', 'messages' : ['7']} + , '6' : {'title': 'ts7', 'status': '1', 'assignedto': '10', + 'priority': '3', 'messages' : ['8'], 'foo' : None} + , '7' : {'title': 'ts8', 'status': '2', 'assignedto': '10', + 'priority': '3', 'messages' : ['8'], 'foo' : foo} + , '8' : {'title': 'ts9', 'status': '1', 'assignedto': '10', + 'priority': '3', 'messages' : ['7', '8']} + } + result = [] + self.db.clearCache() + for id in ufilt(None, {}, [('+','supervisor.supervisor.supervisor'), + ('-','supervisor.supervisor'), ('-','supervisor'), + ('+','username')]): + result.append(id) + nodeid = id + for x in range(4): + assert(('user', nodeid) in self.db.cache) + n = self.db.user.getnode(nodeid) + for k, v in user_result[nodeid].iteritems(): + ae((k, n[k]), (k, v)) + for k in 'creation', 'activity': + assert(n[k]) + nodeid = n.supervisor + self.db.clearCache() + ae (result, ['8', '9', '10', '6', '7', '1', '3', '2', '4', '5']) + + result = [] + self.db.clearCache() + for id in filt(None, {}, + [('+','assignedto.supervisor.supervisor.supervisor'), + ('+','assignedto.supervisor.supervisor'), + ('-','assignedto.supervisor'), ('+','assignedto')]): + result.append(id) + assert(('issue', id) in self.db.cache) + n = self.db.issue.getnode(id) + for k, v in issue_result[id].iteritems(): + ae((k, n[k]), (k, v)) + for k in 'creation', 'activity': + assert(n[k]) + nodeid = n.assignedto + for x in range(4): + assert(('user', nodeid) in self.db.cache) + n = self.db.user.getnode(nodeid) + for k, v in user_result[nodeid].iteritems(): + ae((k, n[k]), (k, v)) + for k in 'creation', 'activity': + assert(n[k]) + nodeid = n.supervisor + self.db.clearCache() + ae (result, ['4', '5', '6', '7', '8', '1', '2', '3']) + class ClassicInitTest(unittest.TestCase): count = 0 @@ -2166,4 +2353,36 @@ except OSError, error: if error.errno not in (errno.ENOENT, 
errno.ESRCH): raise +class ConcurrentDBTest(ClassicInitTest): + def testConcurrency(self): + # The idea here is a read-modify-update cycle in the presence of + # a cache that has to be properly handled. The same applies if + # we extend a String or otherwise modify something that depends + # on the previous value. + + # set up and open a tracker + tracker = setupTracker(self.dirname, self.backend) + # open the database + self.db = tracker.open('admin') + + prio = '1' + self.assertEqual(self.db.priority.get(prio, 'order'), 1.0) + def inc(db): + db.priority.set(prio, order=db.priority.get(prio, 'order') + 1) + + inc(self.db) + + db2 = tracker.open("admin") + self.assertEqual(db2.priority.get(prio, 'order'), 1.0) + db2.commit() + self.db.commit() + self.assertEqual(self.db.priority.get(prio, 'order'), 2.0) + + inc(db2) + db2.commit() + db2.clearCache() + self.assertEqual(db2.priority.get(prio, 'order'), 3.0) + db2.close() + + # vim: set et sts=4 sw=4 : Added: tracker/roundup-src/test/memorydb.py ============================================================================== --- (empty file) +++ tracker/roundup-src/test/memorydb.py Thu Aug 4 15:46:52 2011 @@ -0,0 +1,434 @@ +# $Id: test_memorydb.py,v 1.4 2004-11-03 01:34:21 richard Exp $ +'''Implement an in-memory hyperdb for testing purposes. 
+''' + +import shutil + +from roundup import hyperdb +from roundup import roundupdb +from roundup import security +from roundup import password +from roundup import configuration +from roundup.backends import back_anydbm +from roundup.backends import indexer_dbm +from roundup.backends import sessions_dbm +from roundup.backends import indexer_common +from roundup.hyperdb import * +from roundup.support import ensureParentsExist + +def new_config(debug=False): + config = configuration.CoreConfig() + config.DATABASE = "db" + #config.logging = MockNull() + # these TRACKER_WEB and MAIL_DOMAIN values are used in mailgw tests + if debug: + config.LOGGING_LEVEL = "DEBUG" + config.MAIL_DOMAIN = "your.tracker.email.domain.example" + config.TRACKER_WEB = "http://tracker.example/cgi-bin/roundup.cgi/bugs/" + return config + +def create(journaltag, create=True, debug=False): + db = Database(new_config(debug), journaltag) + + # load standard schema + schema = os.path.join(os.path.dirname(__file__), + '../share/roundup/templates/classic/schema.py') + vars = dict(globals()) + vars['db'] = db + execfile(schema, vars) + initial_data = os.path.join(os.path.dirname(__file__), + '../share/roundup/templates/classic/initial_data.py') + vars = dict(db=db, admin_email='admin at test.com', + adminpw=password.Password('sekrit')) + execfile(initial_data, vars) + + # load standard detectors + dirname = os.path.join(os.path.dirname(__file__), + '../share/roundup/templates/classic/detectors') + for fn in os.listdir(dirname): + if not fn.endswith('.py'): continue + vars = {} + execfile(os.path.join(dirname, fn), vars) + vars['init'](db) + + ''' + status = Class(db, "status", name=String()) + status.setkey("name") + priority = Class(db, "priority", name=String(), order=String()) + priority.setkey("name") + keyword = Class(db, "keyword", name=String(), order=String()) + keyword.setkey("name") + user = Class(db, "user", username=String(), password=Password(), + assignable=Boolean(), age=Number(), 
roles=String(), address=String(), + supervisor=Link('user'),realname=String(),alternate_addresses=String()) + user.setkey("username") + file = FileClass(db, "file", name=String(), type=String(), + comment=String(indexme="yes"), fooz=Password()) + file_nidx = FileClass(db, "file_nidx", content=String(indexme='no')) + issue = IssueClass(db, "issue", title=String(indexme="yes"), + status=Link("status"), nosy=Multilink("user"), deadline=Date(), + foo=Interval(), files=Multilink("file"), assignedto=Link('user'), + priority=Link('priority'), spam=Multilink('msg'), + feedback=Link('msg')) + stuff = Class(db, "stuff", stuff=String()) + session = Class(db, 'session', title=String()) + msg = FileClass(db, "msg", date=Date(), + author=Link("user", do_journal='no'), + files=Multilink('file'), inreplyto=String(), + messageid=String(), summary=String(), + content=String(), + recipients=Multilink("user", do_journal='no') + ) + ''' + if create: + db.user.create(username="fred", roles='User', + password=password.Password('sekrit'), address='fred at example.com') + + db.security.addPermissionToRole('User', 'Email Access') + ''' + db.security.addPermission(name='Register', klass='user') + db.security.addPermissionToRole('User', 'Web Access') + db.security.addPermissionToRole('Anonymous', 'Email Access') + db.security.addPermissionToRole('Anonymous', 'Register', 'user') + for cl in 'issue', 'file', 'msg', 'keyword': + db.security.addPermissionToRole('User', 'View', cl) + db.security.addPermissionToRole('User', 'Edit', cl) + db.security.addPermissionToRole('User', 'Create', cl) + for cl in 'priority', 'status': + db.security.addPermissionToRole('User', 'View', cl) + ''' + return db + +class cldb(dict): + def close(self): + pass + +class BasicDatabase(dict): + ''' Provide a nice encapsulation of an anydbm store. + + Keys are id strings, values are automatically marshalled data. 
+ ''' + def __getitem__(self, key): + if key not in self: + d = self[key] = {} + return d + return super(BasicDatabase, self).__getitem__(key) + def exists(self, infoid): + return infoid in self + def get(self, infoid, value, default=None): + return self[infoid].get(value, default) + def getall(self, infoid): + if infoid not in self: + raise KeyError(infoid) + return self[infoid] + def set(self, infoid, **newvalues): + self[infoid].update(newvalues) + def list(self): + return self.keys() + def destroy(self, infoid): + del self[infoid] + def commit(self): + pass + def close(self): + pass + def updateTimestamp(self, sessid): + pass + def clean(self): + pass + +class Sessions(BasicDatabase, sessions_dbm.Sessions): + name = 'sessions' + +class OneTimeKeys(BasicDatabase, sessions_dbm.Sessions): + name = 'otks' + +class Indexer(indexer_dbm.Indexer): + def __init__(self, db): + indexer_common.Indexer.__init__(self, db) + self.reindex = 0 + self.quiet = 9 + self.changed = 0 + + def load_index(self, reload=0, wordlist=None): + # Unless reload is indicated, do not load twice + if self.index_loaded() and not reload: + return 0 + self.words = {} + self.files = {'_TOP':(0,None)} + self.fileids = {} + self.changed = 0 + + def save_index(self): + pass + def force_reindex(self): + # TODO I'm concerned that force_reindex may not be tested by + # testForcedReindexing if the functionality can just be removed + pass + +class Database(back_anydbm.Database): + """A database for storing records containing flexible data types. + + Transaction stuff TODO: + + - check the timestamp of the class file and nuke the cache if it's + modified. Do some sort of conflict checking on the dirty stuff. + - perhaps detect write collisions (related to above)? 
+ """ + def __init__(self, config, journaltag=None): + self.config, self.journaltag = config, journaltag + self.classes = {} + self.items = {} + self.ids = {} + self.journals = {} + self.files = {} + self.tx_files = {} + self.security = security.Security(self) + self.stats = {'cache_hits': 0, 'cache_misses': 0, 'get_items': 0, + 'filtering': 0} + self.sessions = Sessions() + self.otks = OneTimeKeys() + self.indexer = Indexer(self) + + # anydbm bits + self.cache = {} # cache of nodes loaded or created + self.dirtynodes = {} # keep track of the dirty nodes by class + self.newnodes = {} # keep track of the new nodes by class + self.destroyednodes = {}# keep track of the destroyed nodes by class + self.transactions = [] + + def filename(self, classname, nodeid, property=None, create=0): + shutil.copyfile(__file__, __file__+'.dummy') + return __file__+'.dummy' + + def filesize(self, classname, nodeid, property=None, create=0): + return len(self.getfile(classname, nodeid, property)) + + def post_init(self): + pass + + def refresh_database(self): + pass + + def getSessionManager(self): + return self.sessions + + def getOTKManager(self): + return self.otks + + def reindex(self, classname=None, show_progress=False): + pass + + def __repr__(self): + return ''%id(self) + + def storefile(self, classname, nodeid, property, content): + self.tx_files[classname, nodeid, property] = content + self.transactions.append((self.doStoreFile, (classname, nodeid, + property))) + + def getfile(self, classname, nodeid, property): + if (classname, nodeid, property) in self.tx_files: + return self.tx_files[classname, nodeid, property] + return self.files[classname, nodeid, property] + + def doStoreFile(self, classname, nodeid, property, **databases): + self.files[classname, nodeid, property] = self.tx_files[classname, nodeid, property] + return (classname, nodeid) + + def rollbackStoreFile(self, classname, nodeid, property, **databases): + del self.tx_files[classname, nodeid, property] + + def 
numfiles(self): + return len(self.files) + len(self.tx_files) + + def close(self): + self.clearCache() + self.tx_files = {} + # kill the schema too + self.classes = {} + # just keep the .items + + # + # Classes + # + def __getattr__(self, classname): + """A convenient way of calling self.getclass(classname).""" + if self.classes.has_key(classname): + return self.classes[classname] + raise AttributeError, classname + + def addclass(self, cl): + cn = cl.classname + if self.classes.has_key(cn): + raise ValueError, cn + self.classes[cn] = cl + if cn not in self.items: + self.items[cn] = cldb() + self.ids[cn] = 0 + + # add default Edit and View permissions + self.security.addPermission(name="Create", klass=cn, + description="User is allowed to create "+cn) + self.security.addPermission(name="Edit", klass=cn, + description="User is allowed to edit "+cn) + self.security.addPermission(name="View", klass=cn, + description="User is allowed to access "+cn) + + def getclasses(self): + """Return a list of the names of all existing classes.""" + l = self.classes.keys() + l.sort() + return l + + def getclass(self, classname): + """Get the Class object representing a particular class. + + If 'classname' is not a valid class name, a KeyError is raised. 
+ """ + try: + return self.classes[classname] + except KeyError: + raise KeyError, 'There is no class called "%s"'%classname + + # + # Class DBs + # + def clear(self): + self.items = {} + + def getclassdb(self, classname, mode='r'): + """ grab a connection to the class db that will be used for + multiple actions + """ + return self.items[classname] + + def getCachedJournalDB(self, classname): + return self.journals.setdefault(classname, {}) + + # + # Node IDs + # + def newid(self, classname): + self.ids[classname] += 1 + return str(self.ids[classname]) + def setid(self, classname, id): + self.ids[classname] = int(id) + + # + # Journal + # + def doSaveJournal(self, classname, nodeid, action, params, creator, + creation): + if creator is None: + creator = self.getuid() + if creation is None: + creation = date.Date() + self.journals.setdefault(classname, {}).setdefault(nodeid, + []).append((nodeid, creation, creator, action, params)) + + def doSetJournal(self, classname, nodeid, journal): + self.journals.setdefault(classname, {})[nodeid] = journal + + def getjournal(self, classname, nodeid): + # our journal result + res = [] + + # add any journal entries for transactions not committed to the + # database + for method, args in self.transactions: + if method != self.doSaveJournal: + continue + (cache_classname, cache_nodeid, cache_action, cache_params, + cache_creator, cache_creation) = args + if cache_classname == classname and cache_nodeid == nodeid: + if not cache_creator: + cache_creator = self.getuid() + if not cache_creation: + cache_creation = date.Date() + res.append((cache_nodeid, cache_creation, cache_creator, + cache_action, cache_params)) + try: + res += self.journals.get(classname, {})[nodeid] + except KeyError: + if res: return res + raise IndexError, nodeid + return res + + def pack(self, pack_before): + """ Delete all journal entries except "create" before 'pack_before'. 
+ """ + pack_before = pack_before.serialise() + for classname in self.journals: + db = self.journals[classname] + for key in db: + # get the journal for this db entry + l = [] + last_set_entry = None + for entry in db[key]: + # unpack the entry + (nodeid, date_stamp, self.journaltag, action, + params) = entry + date_stamp = date_stamp.serialise() + # if the entry is after the pack date, _or_ the initial + # create entry, then it stays + if date_stamp > pack_before or action == 'create': + l.append(entry) + db[key] = l + +class Class(back_anydbm.Class): + pass + +class FileClass(back_anydbm.FileClass): + def __init__(self, db, classname, **properties): + if not properties.has_key('content'): + properties['content'] = hyperdb.String(indexme='yes') + if not properties.has_key('type'): + properties['type'] = hyperdb.String() + back_anydbm.Class.__init__(self, db, classname, **properties) + + def export_files(self, dirname, nodeid): + dest = self.exportFilename(dirname, nodeid) + ensureParentsExist(dest) + f = open(dest, 'wb') + f.write(self.db.files[self.classname, nodeid, None]) + f.close() + + def import_files(self, dirname, nodeid): + source = self.exportFilename(dirname, nodeid) + f = open(source, 'rb') + self.db.files[self.classname, nodeid, None] = f.read() + f.close() + mime_type = None + props = self.getprops() + if props.has_key('type'): + mime_type = self.get(nodeid, 'type') + if not mime_type: + mime_type = self.default_mime_type + if props['content'].indexme: + self.db.indexer.add_text((self.classname, nodeid, 'content'), + self.get(nodeid, 'content'), mime_type) + +# deviation from spec - was called ItemClass +class IssueClass(Class, roundupdb.IssueClass): + # Overridden methods: + def __init__(self, db, classname, **properties): + """The newly-created class automatically includes the "messages", + "files", "nosy", and "superseder" properties. 
If the 'properties' + dictionary attempts to specify any of these properties or a + "creation" or "activity" property, a ValueError is raised. + """ + if not properties.has_key('title'): + properties['title'] = hyperdb.String(indexme='yes') + if not properties.has_key('messages'): + properties['messages'] = hyperdb.Multilink("msg") + if not properties.has_key('files'): + properties['files'] = hyperdb.Multilink("file") + if not properties.has_key('nosy'): + # note: journalling is turned off as it really just wastes + # space. this behaviour may be overridden in an instance + properties['nosy'] = hyperdb.Multilink("user", do_journal="no") + if not properties.has_key('superseder'): + properties['superseder'] = hyperdb.Multilink(classname) + Class.__init__(self, db, classname, **properties) + +# vim: set et sts=4 sw=4 : Modified: tracker/roundup-src/test/session_common.py ============================================================================== --- tracker/roundup-src/test/session_common.py (original) +++ tracker/roundup-src/test/session_common.py Thu Aug 4 15:46:52 2011 @@ -20,6 +20,23 @@ if os.path.exists(config.DATABASE): shutil.rmtree(config.DATABASE) + def testList(self): + self.sessions.list() + self.sessions.set('random_key', text='hello, world!') + self.sessions.list() + + def testGetAll(self): + self.sessions.set('random_key', text='hello, world!') + self.assertEqual(self.sessions.getall('random_key'), + {'text': 'hello, world!'}) + + def testDestroy(self): + self.sessions.set('random_key', text='hello, world!') + self.assertEquals(self.sessions.getall('random_key'), + {'text': 'hello, world!'}) + self.sessions.destroy('random_key') + self.assertRaises(KeyError, self.sessions.getall, 'random_key') + def testSetSession(self): self.sessions.set('random_key', text='hello, world!') self.assertEqual(self.sessions.get('random_key', 'text'), Modified: tracker/roundup-src/test/test_actions.py 
============================================================================== --- tracker/roundup-src/test/test_actions.py (original) +++ tracker/roundup-src/test/test_actions.py Thu Aug 4 15:46:52 2011 @@ -249,6 +249,7 @@ ({'messages':hyperdb.Multilink('msg') ,'content':hyperdb.String() ,'files':hyperdb.Multilink('file') + ,'msg':hyperdb.Link('msg') }) self.action = EditItemAction(self.client) @@ -298,6 +299,19 @@ ) try : self.action.handle() + except Redirect, msg: + pass + self.assertEqual(expect, self.result) + + def testLinkNewToExisting(self): + expect = [('create',(),{'msg':'1','title':'TEST'})] + self.client.db.classes.get = lambda a, b:['23','42'] + self.client.parsePropsFromForm = lambda: \ + ( {('issue','-1'):{'title':'TEST'},('msg','1'):{}} + , [('issue','-1','msg',[('msg','1')])] + ) + try : + self.action.handle() except Redirect, msg: pass self.assertEqual(expect, self.result) Modified: tracker/roundup-src/test/test_cgi.py ============================================================================== --- tracker/roundup-src/test/test_cgi.py (original) +++ tracker/roundup-src/test/test_cgi.py Thu Aug 4 15:46:52 2011 @@ -14,7 +14,7 @@ from roundup.cgi import client, actions, exceptions from roundup.cgi.exceptions import FormError -from roundup.cgi.templating import HTMLItem +from roundup.cgi.templating import HTMLItem, HTMLRequest from roundup.cgi.form_parser import FormParser from roundup import init, instance, password, hyperdb, date @@ -425,6 +425,44 @@ ':confirm:password': ''}, 'user', nodeid), ({('user', nodeid): {}}, [])) + def testPasswordMigration(self): + chef = self.db.user.lookup('Chef') + form = dict(__login_name='Chef', __login_password='foo') + cl = self._make_client(form) + # assume that the "best" algorithm is the first one and doesn't + # need migration, all others should be migrated. 
+ for scheme in password.Password.deprecated_schemes: + pw1 = password.Password('foo', scheme=scheme) + self.assertEqual(pw1.needs_migration(), True) + self.db.user.set(chef, password=pw1) + self.db.commit() + actions.LoginAction(cl).handle() + pw = self.db.user.get(chef, 'password') + self.assertEqual(pw, 'foo') + self.assertEqual(pw.needs_migration(), False) + pw1 = pw + self.assertEqual(pw1.needs_migration(), False) + scheme = password.Password.known_schemes[0] + self.assertEqual(scheme, pw1.scheme) + actions.LoginAction(cl).handle() + pw = self.db.user.get(chef, 'password') + self.assertEqual(pw, 'foo') + self.assertEqual(pw, pw1) + + def testPasswordConfigOption(self): + chef = self.db.user.lookup('Chef') + form = dict(__login_name='Chef', __login_password='foo') + cl = self._make_client(form) + self.db.config.PASSWORD_PBKDF2_DEFAULT_ROUNDS = 1000 + pw1 = password.Password('foo', scheme='crypt') + self.assertEqual(pw1.needs_migration(), True) + self.db.user.set(chef, password=pw1) + self.db.commit() + actions.LoginAction(cl).handle() + pw = self.db.user.get(chef, 'password') + self.assertEqual('PBKDF2', pw.scheme) + self.assertEqual(1000, password.pbkdf2_unpack(pw.password)[0]) + # # Boolean # @@ -616,14 +654,18 @@ # SECURITY # # XXX test all default permissions - def _make_client(self, form, classname='user', nodeid='1', userid='2'): + def _make_client(self, form, classname='user', nodeid='1', + userid='2', template='item'): cl = client.Client(self.instance, None, {'PATH_INFO':'/', 'REQUEST_METHOD':'POST'}, makeForm(form)) - cl.classname = 'user' - cl.nodeid = nodeid + cl.classname = classname + if nodeid is not None: + cl.nodeid = nodeid cl.db = self.db cl.userid = userid cl.language = ('en',) + cl.error_message = [] + cl.template = template return cl def testClassPermission(self): @@ -636,7 +678,8 @@ def testCheckAndPropertyPermission(self): self.db.security.permissions = {} - def own_record(db, userid, itemid): return userid == itemid + def own_record(db, 
userid, itemid): + return userid == itemid p = self.db.security.addPermission(name='Edit', klass='user', check=own_record, properties=("password", )) self.db.security.addPermissionToRole('User', p) @@ -644,10 +687,231 @@ cl = self._make_client(dict(username='bob')) self.assertRaises(exceptions.Unauthorised, actions.EditItemAction(cl).handle) + cl = self._make_client(dict(roles='User,Admin'), userid='4', nodeid='4') + self.assertRaises(exceptions.Unauthorised, + actions.EditItemAction(cl).handle) + cl = self._make_client(dict(roles='User,Admin'), userid='4') + self.assertRaises(exceptions.Unauthorised, + actions.EditItemAction(cl).handle) + cl = self._make_client(dict(roles='User,Admin')) + self.assertRaises(exceptions.Unauthorised, + actions.EditItemAction(cl).handle) + # working example, mary may change her pw + cl = self._make_client({'password':'ob', '@confirm at password':'ob'}, + nodeid='4', userid='4') + self.assertRaises(exceptions.Redirect, + actions.EditItemAction(cl).handle) cl = self._make_client({'password':'bob', '@confirm at password':'bob'}) self.failUnlessRaises(exceptions.Unauthorised, actions.EditItemAction(cl).handle) + def testCreatePermission(self): + # this checks if we properly differentiate between create and + # edit permissions + self.db.security.permissions = {} + self.db.security.addRole(name='UserAdd') + # Don't allow roles + p = self.db.security.addPermission(name='Create', klass='user', + properties=("username", "password", "address", + "alternate_address", "realname", "phone", "organisation", + "timezone")) + self.db.security.addPermissionToRole('UserAdd', p) + # Don't allow roles *and* don't allow username + p = self.db.security.addPermission(name='Edit', klass='user', + properties=("password", "address", "alternate_address", + "realname", "phone", "organisation", "timezone")) + self.db.security.addPermissionToRole('UserAdd', p) + self.db.user.set('4', roles='UserAdd') + + # anonymous may not + cl = 
self._make_client({'username':'new_user', 'password':'secret', + '@confirm at password':'secret', 'address':'new_user at bork.bork', + 'roles':'Admin'}, nodeid=None, userid='2') + self.assertRaises(exceptions.Unauthorised, + actions.NewItemAction(cl).handle) + # Don't allow creating new user with roles + cl = self._make_client({'username':'new_user', 'password':'secret', + '@confirm at password':'secret', 'address':'new_user at bork.bork', + 'roles':'Admin'}, nodeid=None, userid='4') + self.assertRaises(exceptions.Unauthorised, + actions.NewItemAction(cl).handle) + self.assertEqual(cl.error_message,[]) + # this should work + cl = self._make_client({'username':'new_user', 'password':'secret', + '@confirm at password':'secret', 'address':'new_user at bork.bork'}, + nodeid=None, userid='4') + self.assertRaises(exceptions.Redirect, + actions.NewItemAction(cl).handle) + self.assertEqual(cl.error_message,[]) + # don't allow changing (my own) username (in this example) + cl = self._make_client(dict(username='new_user42'), userid='4') + self.assertRaises(exceptions.Unauthorised, + actions.EditItemAction(cl).handle) + cl = self._make_client(dict(username='new_user42'), userid='4', + nodeid='4') + self.assertRaises(exceptions.Unauthorised, + actions.EditItemAction(cl).handle) + # don't allow changing (my own) roles + cl = self._make_client(dict(roles='User,Admin'), userid='4', + nodeid='4') + self.assertRaises(exceptions.Unauthorised, + actions.EditItemAction(cl).handle) + cl = self._make_client(dict(roles='User,Admin'), userid='4') + self.assertRaises(exceptions.Unauthorised, + actions.EditItemAction(cl).handle) + cl = self._make_client(dict(roles='User,Admin')) + self.assertRaises(exceptions.Unauthorised, + actions.EditItemAction(cl).handle) + + def testSearchPermission(self): + # this checks if we properly check for search permissions + self.db.security.permissions = {} + self.db.security.addRole(name='User') + self.db.security.addRole(name='Project') + 
self.db.security.addPermissionToRole('User', 'Web Access') + self.db.security.addPermissionToRole('Project', 'Web Access') + # Allow viewing department + p = self.db.security.addPermission(name='View', klass='department') + self.db.security.addPermissionToRole('User', p) + # Allow viewing interesting things (but not department) on iss + # But users might only view issues where they are on nosy + # (so in the real world the check method would be better) + p = self.db.security.addPermission(name='View', klass='iss', + properties=("title", "status"), check=lambda x,y,z: True) + self.db.security.addPermissionToRole('User', p) + # Allow all relevant roles access to stat + p = self.db.security.addPermission(name='View', klass='stat') + self.db.security.addPermissionToRole('User', p) + self.db.security.addPermissionToRole('Project', p) + # Allow role "Project" access to whole iss + p = self.db.security.addPermission(name='View', klass='iss') + self.db.security.addPermissionToRole('Project', p) + + department = self.instance.backend.Class(self.db, "department", + name=hyperdb.String()) + status = self.instance.backend.Class(self.db, "stat", + name=hyperdb.String()) + issue = self.instance.backend.Class(self.db, "iss", + title=hyperdb.String(), status=hyperdb.Link('stat'), + department=hyperdb.Link('department')) + + d1 = department.create(name='d1') + d2 = department.create(name='d2') + open = status.create(name='open') + closed = status.create(name='closed') + issue.create(title='i1', status=open, department=d2) + issue.create(title='i2', status=open, department=d1) + issue.create(title='i2', status=closed, department=d1) + + chef = self.db.user.lookup('Chef') + mary = self.db.user.lookup('mary') + self.db.user.set(chef, roles = 'User, Project') + + perm = self.db.security.hasPermission + search = self.db.security.hasSearchPermission + self.assert_(perm('View', chef, 'iss', 'department', '1')) + self.assert_(perm('View', chef, 'iss', 'department', '2')) + 
self.assert_(perm('View', chef, 'iss', 'department', '3')) + self.assert_(search(chef, 'iss', 'department')) + + self.assert_(not perm('View', mary, 'iss', 'department')) + self.assert_(perm('View', mary, 'iss', 'status')) + # Conditionally allow view of whole iss (check is False here, + # this might check for department owner in the real world) + p = self.db.security.addPermission(name='View', klass='iss', + check=lambda x,y,z: False) + self.db.security.addPermissionToRole('User', p) + self.assert_(perm('View', mary, 'iss', 'department')) + self.assert_(not perm('View', mary, 'iss', 'department', '1')) + self.assert_(not search(mary, 'iss', 'department')) + + self.assert_(perm('View', mary, 'iss', 'status')) + self.assert_(not search(mary, 'iss', 'status')) + # Allow user to search for iss.status + p = self.db.security.addPermission(name='Search', klass='iss', + properties=("status",)) + self.db.security.addPermissionToRole('User', p) + self.assert_(search(mary, 'iss', 'status')) + + dep = {'@action':'search','columns':'id','@filter':'department', + 'department':'1'} + stat = {'@action':'search','columns':'id','@filter':'status', + 'status':'1'} + depsort = {'@action':'search','columns':'id','@sort':'department'} + depgrp = {'@action':'search','columns':'id','@group':'department'} + + # Filter on department ignored for role 'User': + cl = self._make_client(dep, classname='iss', nodeid=None, userid=mary, + template='index') + h = HTMLRequest(cl) + self.assertEqual([x.id for x in h.batch()],['1', '2', '3']) + # Filter on department works for role 'Project': + cl = self._make_client(dep, classname='iss', nodeid=None, userid=chef, + template='index') + h = HTMLRequest(cl) + self.assertEqual([x.id for x in h.batch()],['2', '3']) + # Filter on status works for all: + cl = self._make_client(stat, classname='iss', nodeid=None, userid=mary, + template='index') + h = HTMLRequest(cl) + self.assertEqual([x.id for x in h.batch()],['1', '2']) + cl = self._make_client(stat, 
classname='iss', nodeid=None, userid=chef, + template='index') + h = HTMLRequest(cl) + self.assertEqual([x.id for x in h.batch()],['1', '2']) + # Sorting and grouping for class Project works: + cl = self._make_client(depsort, classname='iss', nodeid=None, + userid=chef, template='index') + h = HTMLRequest(cl) + self.assertEqual([x.id for x in h.batch()],['2', '3', '1']) + cl = self._make_client(depgrp, classname='iss', nodeid=None, + userid=chef, template='index') + h = HTMLRequest(cl) + self.assertEqual([x.id for x in h.batch()],['2', '3', '1']) + # Sorting and grouping for class User fails: + cl = self._make_client(depsort, classname='iss', nodeid=None, + userid=mary, template='index') + h = HTMLRequest(cl) + self.assertEqual([x.id for x in h.batch()],['1', '2', '3']) + cl = self._make_client(depgrp, classname='iss', nodeid=None, + userid=mary, template='index') + h = HTMLRequest(cl) + self.assertEqual([x.id for x in h.batch()],['1', '2', '3']) + + def testEditCSV(self): + form = dict(rows='id,name\n1,newkey') + cl = self._make_client(form, userid='1', classname='keyword') + cl.ok_message = [] + actions.EditCSVAction(cl).handle() + self.assertEqual(cl.ok_message, ['Items edited OK']) + k = self.db.keyword.getnode('1') + self.assertEqual(k.name, 'newkey') + form = dict(rows=u'id,name\n1,\xe4\xf6\xfc'.encode('utf-8')) + cl = self._make_client(form, userid='1', classname='keyword') + cl.ok_message = [] + actions.EditCSVAction(cl).handle() + self.assertEqual(cl.ok_message, ['Items edited OK']) + k = self.db.keyword.getnode('1') + self.assertEqual(k.name, u'\xe4\xf6\xfc'.encode('utf-8')) + + def testRoles(self): + cl = self._make_client({}) + self.db.user.set('1', roles='aDmin, uSer') + item = HTMLItem(cl, 'user', '1') + self.assert_(item.hasRole('Admin')) + self.assert_(item.hasRole('User')) + self.assert_(item.hasRole('AdmiN')) + self.assert_(item.hasRole('UseR')) + self.assert_(item.hasRole('UseR','Admin')) + self.assert_(item.hasRole('UseR','somethingelse')) + 
self.assert_(item.hasRole('somethingelse','Admin')) + self.assert_(not item.hasRole('userr')) + self.assert_(not item.hasRole('adminn')) + self.assert_(not item.hasRole('')) + self.assert_(not item.hasRole(' ')) + self.db.user.set('1', roles='') + self.assert_(not item.hasRole('')) + def testCSVExport(self): cl = self._make_client({'@columns': 'id,name'}, nodeid=None, userid='1') Modified: tracker/roundup-src/test/test_dates.py ============================================================================== --- tracker/roundup-src/test/test_dates.py (original) +++ tracker/roundup-src/test/test_dates.py Thu Aug 4 15:46:52 2011 @@ -23,11 +23,21 @@ import datetime import calendar +from roundup import date, i18n from roundup.date import Date, Interval, Range, fixTimeOverflow, \ get_timezone class DateTestCase(unittest.TestCase): + def setUp(self): + self.old_gettext_ = i18n.gettext + self.old_ngettext_ = i18n.ngettext + i18n.gettext = i18n.get_translation(language='C').gettext + i18n.ngettext = i18n.get_translation(language='C').ngettext + + def tearDown(self): + i18n.gettext = self.old_gettext_ + i18n.ngettext = self.old_ngettext_ def testDateInterval(self): ae = self.assertEqual Modified: tracker/roundup-src/test/test_indexer.py ============================================================================== --- tracker/roundup-src/test/test_indexer.py (original) +++ tracker/roundup-src/test/test_indexer.py Thu Aug 4 15:46:52 2011 @@ -124,6 +124,14 @@ [('test', '1', 'a'), ('test', '2', 'a')]) + def test_wordsplitting(self): + """Test if word splitting works.""" + self.dex.add_text(('test', '1', 'a'), 'aaaa-aaa bbbb*bbb') + self.dex.add_text(('test', '2', 'a'), 'aaaA-aaa BBBB*BBB') + for k in 'aaaa', 'aaa', 'bbbb', 'bbb': + self.assertSeqEqual(self.dex.find([k]), + [('test', '1', 'a'), ('test', '2', 'a')]) + def tearDown(self): shutil.rmtree('test-index') Added: tracker/roundup-src/test/test_mailer.py 
============================================================== --- (empty file) +++ tracker/roundup-src/test/test_mailer.py Thu Aug 4 15:46:52 2011 @@ -0,0 +1,24 @@ +#-*- encoding: utf8 -*- +import unittest + +from roundup import mailer + +class EncodingTestCase(unittest.TestCase): + def testEncoding(self): + a = lambda n, a, c, o: self.assertEquals(mailer.nice_sender_header(n, + a, c), o) + a('ascii', 'ascii at test.com', 'iso8859-1', 'ascii ') + a(u'café', 'ascii at test.com', 'iso8859-1', + '=?iso8859-1?q?caf=E9?= ') + a('as"ii', 'ascii at test.com', 'iso8859-1', '"as\\"ii" ') + +def test_suite(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(EncodingTestCase)) + return suite + +if __name__ == '__main__': + runner = unittest.TextTestRunner() + unittest.main(testRunner=runner) + +# vim: set et sts=4 sw=4 : Modified: tracker/roundup-src/test/test_mailgw.py ============================================================================== --- tracker/roundup-src/test/test_mailgw.py (original) +++ tracker/roundup-src/test/test_mailgw.py Thu Aug 4 15:46:52 2011 @@ -21,12 +21,14 @@ os.environ['SENDMAILDEBUG'] = 'mail-test.log' SENDMAILDEBUG = os.environ['SENDMAILDEBUG'] +from roundup import mailgw, i18n, roundupdb from roundup.mailgw import MailGW, Unauthorized, uidFromAddress, \ parseContent, IgnoreLoop, IgnoreBulk, MailUsageError, MailUsageHelp from roundup import init, instance, password, rfc2822, __version__ from roundup.anypy.sets_ import set -import db_test_base +#import db_test_base +import memorydb class Message(rfc822.Message): """String-based Message class with equivalence test.""" @@ -37,6 +39,10 @@ return (self.dict == other.dict and self.fp.read() == other.fp.read()) +class Tracker(object): + def open(self, journaltag): + return self.db + class DiffHelper: def compareMessages(self, new, old): """Compare messages for semantic equivalence.""" @@ -78,7 +84,7 @@ res.extend(body_diff) if res: - res.insert(0, 'Generated message 
not correct (diff follows):') + res.insert(0, 'Generated message not correct (diff follows, expected vs. actual):') raise AssertionError, '\n'.join(res) def compareStrings(self, s2, s1, replace={}): @@ -114,13 +120,17 @@ count = 0 schema = 'classic' def setUp(self): + self.old_translate_ = mailgw._ + roundupdb._ = mailgw._ = i18n.get_translation(language='C').gettext MailgwTestCase.count = MailgwTestCase.count + 1 - self.dirname = '_test_mailgw_%s'%self.count - # set up and open a tracker - self.instance = db_test_base.setupTracker(self.dirname) - # and open the database - self.db = self.instance.open('admin') + # and open the database / "instance" + self.db = memorydb.create('admin') + self.instance = Tracker() + self.instance.db = self.db + self.instance.config = self.db.config + self.instance.MailGW = MailGW + self.chef_id = self.db.user.create(username='Chef', address='chef at bork.bork.bork', realname='Bork, Chef', roles='User') self.richard_id = self.db.user.create(username='richard', @@ -130,27 +140,27 @@ self.john_id = self.db.user.create(username='john', address='john at test.test', roles='User', realname='John Doe', alternate_addresses='jondoe at test.test\njohn.doe at test.test') + self.rgg_id = self.db.user.create(username='rgg', + address='rgg at test.test', roles='User') def tearDown(self): + roundupdb._ = mailgw._ = self.old_translate_ if os.path.exists(SENDMAILDEBUG): os.remove(SENDMAILDEBUG) self.db.close() - try: - shutil.rmtree(self.dirname) - except OSError, error: - if error.errno not in (errno.ENOENT, errno.ESRCH): raise - - def _handle_mail(self, message): - # handler will open a new db handle. 
On single-threaded - # databases we'll have to close our current connection - self.db.commit() - self.db.close() - handler = self.instance.MailGW(self.instance) + + def _create_mailgw(self, message, args=()): + class MailGW(self.instance.MailGW): + def handle_message(self, message): + return self._handle_message(message) + handler = MailGW(self.instance, args) + handler.db = self.db + return handler + + def _handle_mail(self, message, args=()): + handler = self._create_mailgw(message, args) handler.trapExceptions = 0 - ret = handler.main(StringIO(message)) - # handler had its own database, open new connection - self.db = self.instance.open('admin') - return ret + return handler.main(StringIO(message)) def _get_mail(self): f = open(SENDMAILDEBUG) @@ -173,6 +183,99 @@ assert not os.path.exists(SENDMAILDEBUG) self.assertEqual(self.db.issue.get(nodeid, 'title'), 'Testing...') + def testMessageWithFromInIt(self): + nodeid = self._handle_mail('''Content-Type: text/plain; + charset="iso-8859-1" +From: Chef +To: issue_tracker at your.tracker.email.domain.example +Cc: richard at test.test +Reply-To: chef at bork.bork.bork +Message-Id: +Subject: [issue] Testing... + +From here to there! +''') + assert not os.path.exists(SENDMAILDEBUG) + msgid = self.db.issue.get(nodeid, 'messages')[0] + self.assertEqual(self.db.msg.get(msgid, 'content'), 'From here to there!') + + def testNoMessageId(self): + self.instance.config['MAIL_DOMAIN'] = 'example.com' + nodeid = self._handle_mail('''Content-Type: text/plain; + charset="iso-8859-1" +From: Chef +To: issue_tracker at your.tracker.email.domain.example +Cc: richard at test.test +Reply-To: chef at bork.bork.bork +Subject: [issue] Testing... + +Hi there! 
+''') + assert not os.path.exists(SENDMAILDEBUG) + msgid = self.db.issue.get(nodeid, 'messages')[0] + messageid = self.db.msg.get(msgid, 'messageid') + x1, x2 = messageid.split('@') + self.assertEqual(x2, 'example.com>') + x = x1.split('.')[-1] + self.assertEqual(x, 'issueNone') + nodeid = self._handle_mail('''Content-Type: text/plain; + charset="iso-8859-1" +From: Chef +To: issue_tracker at your.tracker.email.domain.example +Subject: [issue%(nodeid)s] Testing... + +Just a test reply +'''%locals()) + msgid = self.db.issue.get(nodeid, 'messages')[-1] + messageid = self.db.msg.get(msgid, 'messageid') + x1, x2 = messageid.split('@') + self.assertEqual(x2, 'example.com>') + x = x1.split('.')[-1] + self.assertEqual(x, "issue%s"%nodeid) + + def testOptions(self): + nodeid = self._handle_mail('''Content-Type: text/plain; + charset="iso-8859-1" +From: Chef +To: issue_tracker at your.tracker.email.domain.example +Message-Id: +Reply-To: chef at bork.bork.bork +Subject: [issue] Testing... + +Hi there! +''', (('-C', 'issue'), ('-S', 'status=chatting;priority=critical'))) + self.assertEqual(self.db.issue.get(nodeid, 'status'), '3') + self.assertEqual(self.db.issue.get(nodeid, 'priority'), '1') + + def testOptionsMulti(self): + nodeid = self._handle_mail('''Content-Type: text/plain; + charset="iso-8859-1" +From: Chef +To: issue_tracker at your.tracker.email.domain.example +Message-Id: +Reply-To: chef at bork.bork.bork +Subject: [issue] Testing... + +Hi there! +''', (('-C', 'issue'), ('-S', 'status=chatting'), ('-S', 'priority=critical'))) + self.assertEqual(self.db.issue.get(nodeid, 'status'), '3') + self.assertEqual(self.db.issue.get(nodeid, 'priority'), '1') + + def testOptionClass(self): + nodeid = self._handle_mail('''Content-Type: text/plain; + charset="iso-8859-1" +From: Chef +To: issue_tracker at your.tracker.email.domain.example +Message-Id: +Reply-To: chef at bork.bork.bork +Subject: [issue] Testing... [status=chatting;priority=critical] + +Hi there! 
+''', (('-c', 'issue'),)) + self.assertEqual(self.db.issue.get(nodeid, 'title'), 'Testing...') + self.assertEqual(self.db.issue.get(nodeid, 'status'), '3') + self.assertEqual(self.db.issue.get(nodeid, 'priority'), '1') + def doNewIssue(self): nodeid = self._handle_mail('''Content-Type: text/plain; charset="iso-8859-1" @@ -257,7 +360,8 @@ Subject: [issue1] Testing... To: chef at bork.bork.bork, mary at test.test, richard at test.test From: "Bork, Chef" -Reply-To: Roundup issue tracker +Reply-To: Roundup issue tracker + MIME-Version: 1.0 Message-Id: X-Roundup-Name: Roundup issue tracker @@ -301,7 +405,8 @@ Subject: [issue1] Testing... To: mary at test.test, richard at test.test From: "Bork, Chef" -Reply-To: Roundup issue tracker +Reply-To: Roundup issue tracker + MIME-Version: 1.0 Message-Id: X-Roundup-Name: Roundup issue tracker @@ -342,7 +447,8 @@ Subject: [issue1] Testing... To: mary at test.test, richard at test.test From: "Bork, Chef" -Reply-To: Roundup issue tracker +Reply-To: Roundup issue tracker + MIME-Version: 1.0 Message-Id: X-Roundup-Name: Roundup issue tracker @@ -431,6 +537,77 @@ --bxyzzy-- ''' + multipart_msg_latin1 = '''From: mary +To: issue_tracker at your.tracker.email.domain.example +Message-Id: +In-Reply-To: +Subject: [issue1] Testing... +Content-Type: multipart/alternative; boundary=001485f339f8f361fb049188dbba + + +--001485f339f8f361fb049188dbba +Content-Type: text/plain; charset=ISO-8859-1 +Content-Transfer-Encoding: quoted-printable + +umlaut =E4=F6=FC=C4=D6=DC=DF + +--001485f339f8f361fb049188dbba +Content-Type: text/html; charset=ISO-8859-1 +Content-Transfer-Encoding: quoted-printable + +umlaut =E4=F6=FC=C4=D6=DC=DF + +--001485f339f8f361fb049188dbba-- +''' + + multipart_msg_rfc822 = '''From: mary +To: issue_tracker at your.tracker.email.domain.example +Message-Id: +In-Reply-To: +Subject: [issue1] Testing... +Content-Type: multipart/mixed; boundary=001485f339f8f361fb049188dbba + +This is a multi-part message in MIME format. 
+--001485f339f8f361fb049188dbba +Content-Type: text/plain; charset=ISO-8859-15 +Content-Transfer-Encoding: 7bit + +First part: Text + +--001485f339f8f361fb049188dbba +Content-Type: message/rfc822; name="Fwd: Original email subject.eml" +Content-Transfer-Encoding: 7bit +Content-Disposition: attachment; filename="Fwd: Original email subject.eml" + +Message-Id: +In-Reply-To: +MIME-Version: 1.0 +Subject: Fwd: Original email subject +Date: Mon, 23 Aug 2010 08:23:33 +0200 +Content-Type: multipart/alternative; boundary="090500050101020406060002" + +This is a multi-part message in MIME format. +--090500050101020406060002 +Content-Type: text/plain; charset=ISO-8859-15; format=flowed +Content-Transfer-Encoding: 7bit + +some text in inner email +======================== + +--090500050101020406060002 +Content-Type: text/html; charset=ISO-8859-15 +Content-Transfer-Encoding: 7bit + + +some text in inner email +======================== + + +--090500050101020406060002-- + +--001485f339f8f361fb049188dbba-- +''' + def testMultipartKeepAlternatives(self): self.doNewIssue() self._handle_mail(self.multipart_msg) @@ -448,23 +625,412 @@ self.assertEqual(f.content, content [n]) self.assertEqual(msg.content, 'test attachment second text/plain') - def testMultipartDropAlternatives(self): + def testMultipartSeveralAttachmentMessages(self): + self.doNewIssue() + self._handle_mail(self.multipart_msg) + messages = self.db.issue.get('1', 'messages') + messages.sort() + self.assertEqual(messages[-1], '2') + msg = self.db.msg.getnode (messages[-1]) + self.assertEqual(len(msg.files), 5) + issue = self.db.issue.getnode ('1') + self.assertEqual(len(issue.files), 5) + names = {0 : 'first.dvi', 4 : 'second.dvi'} + content = {3 : 'test attachment third text/plain\n', + 4 : 'Just a test\n'} + for n, id in enumerate (msg.files): + f = self.db.file.getnode (id) + self.assertEqual(f.name, names.get (n, 'unnamed')) + if n in content : + self.assertEqual(f.content, content [n]) + self.assertEqual(msg.content, 
'test attachment second text/plain') + self.assertEqual(msg.files, ['1', '2', '3', '4', '5']) + self.assertEqual(issue.files, ['1', '2', '3', '4', '5']) + + self._handle_mail(self.multipart_msg) + issue = self.db.issue.getnode ('1') + self.assertEqual(len(issue.files), 10) + messages = self.db.issue.get('1', 'messages') + messages.sort() + self.assertEqual(messages[-1], '3') + msg = self.db.msg.getnode (messages[-1]) + self.assertEqual(issue.files, [str(i+1) for i in range(10)]) + self.assertEqual(msg.files, ['6', '7', '8', '9', '10']) + + def testMultipartKeepFiles(self): + self.doNewIssue() + self._handle_mail(self.multipart_msg) + messages = self.db.issue.get('1', 'messages') + messages.sort() + msg = self.db.msg.getnode (messages[-1]) + self.assertEqual(len(msg.files), 5) + issue = self.db.issue.getnode ('1') + self.assertEqual(len(issue.files), 5) + names = {0 : 'first.dvi', 4 : 'second.dvi'} + content = {3 : 'test attachment third text/plain\n', + 4 : 'Just a test\n'} + for n, id in enumerate (msg.files): + f = self.db.file.getnode (id) + self.assertEqual(f.name, names.get (n, 'unnamed')) + if n in content : + self.assertEqual(f.content, content [n]) + self.assertEqual(msg.content, 'test attachment second text/plain') + self._handle_mail('''From: mary +To: issue_tracker at your.tracker.email.domain.example +Message-Id: +In-Reply-To: +Subject: [issue1] Testing... 
+ +This ist a message without attachment +''') + issue = self.db.issue.getnode ('1') + self.assertEqual(len(issue.files), 5) + self.assertEqual(issue.files, ['1', '2', '3', '4', '5']) + + def testMultipartDropAlternatives(self): + self.doNewIssue() + self.db.config.MAILGW_IGNORE_ALTERNATIVES = True + self._handle_mail(self.multipart_msg) + messages = self.db.issue.get('1', 'messages') + messages.sort() + msg = self.db.msg.getnode (messages[-1]) + self.assertEqual(len(msg.files), 2) + names = {1 : 'second.dvi'} + content = {0 : 'test attachment third text/plain\n', + 1 : 'Just a test\n'} + for n, id in enumerate (msg.files): + f = self.db.file.getnode (id) + self.assertEqual(f.name, names.get (n, 'unnamed')) + if n in content : + self.assertEqual(f.content, content [n]) + self.assertEqual(msg.content, 'test attachment second text/plain') + + def testMultipartCharsetUTF8NoAttach(self): + c = 'umlaut \xc3\xa4\xc3\xb6\xc3\xbc\xc3\x84\xc3\x96\xc3\x9c\xc3\x9f' + self.doNewIssue() + self.db.config.NOSY_MAX_ATTACHMENT_SIZE = 0 + self._handle_mail(self.multipart_msg_latin1) + messages = self.db.issue.get('1', 'messages') + messages.sort() + msg = self.db.msg.getnode (messages[-1]) + self.assertEqual(len(msg.files), 1) + name = 'unnamed' + content = '' + c + '\n' + for n, id in enumerate (msg.files): + f = self.db.file.getnode (id) + self.assertEqual(f.name, name) + self.assertEqual(f.content, content) + self.assertEqual(msg.content, c) + self.compareMessages(self._get_mail(), +'''FROM: roundup-admin at your.tracker.email.domain.example +TO: chef at bork.bork.bork, richard at test.test +Content-Type: text/plain; charset="utf-8" +Subject: [issue1] Testing... 
+To: chef at bork.bork.bork, richard at test.test +From: "Contrary, Mary" +Reply-To: Roundup issue tracker + +MIME-Version: 1.0 +Message-Id: +In-Reply-To: +X-Roundup-Name: Roundup issue tracker +X-Roundup-Loop: hello +X-Roundup-Issue-Status: chatting +X-Roundup-Issue-Files: unnamed +Content-Transfer-Encoding: quoted-printable + + +Contrary, Mary added the comment: + +umlaut =C3=A4=C3=B6=C3=BC=C3=84=C3=96=C3=9C=C3=9F +File 'unnamed' not attached - you can download it from http://tracker.examp= +le/cgi-bin/roundup.cgi/bugs/file1. + +---------- +status: unread -> chatting + +_______________________________________________________________________ +Roundup issue tracker + +_______________________________________________________________________ +''') + + def testMultipartCharsetLatin1NoAttach(self): + c = 'umlaut \xc3\xa4\xc3\xb6\xc3\xbc\xc3\x84\xc3\x96\xc3\x9c\xc3\x9f' + self.doNewIssue() + self.db.config.NOSY_MAX_ATTACHMENT_SIZE = 0 + self.db.config.MAIL_CHARSET = 'iso-8859-1' + self._handle_mail(self.multipart_msg_latin1) + messages = self.db.issue.get('1', 'messages') + messages.sort() + msg = self.db.msg.getnode (messages[-1]) + self.assertEqual(len(msg.files), 1) + name = 'unnamed' + content = '' + c + '\n' + for n, id in enumerate (msg.files): + f = self.db.file.getnode (id) + self.assertEqual(f.name, name) + self.assertEqual(f.content, content) + self.assertEqual(msg.content, c) + self.compareMessages(self._get_mail(), +'''FROM: roundup-admin at your.tracker.email.domain.example +TO: chef at bork.bork.bork, richard at test.test +Content-Type: text/plain; charset="iso-8859-1" +Subject: [issue1] Testing... 
+To: chef at bork.bork.bork, richard at test.test +From: "Contrary, Mary" +Reply-To: Roundup issue tracker + +MIME-Version: 1.0 +Message-Id: +In-Reply-To: +X-Roundup-Name: Roundup issue tracker +X-Roundup-Loop: hello +X-Roundup-Issue-Status: chatting +X-Roundup-Issue-Files: unnamed +Content-Transfer-Encoding: quoted-printable + + +Contrary, Mary added the comment: + +umlaut =E4=F6=FC=C4=D6=DC=DF +File 'unnamed' not attached - you can download it from http://tracker.examp= +le/cgi-bin/roundup.cgi/bugs/file1. + +---------- +status: unread -> chatting + +_______________________________________________________________________ +Roundup issue tracker + +_______________________________________________________________________ +''') + + def testMultipartCharsetUTF8AttachFile(self): + c = 'umlaut \xc3\xa4\xc3\xb6\xc3\xbc\xc3\x84\xc3\x96\xc3\x9c\xc3\x9f' + self.doNewIssue() + self._handle_mail(self.multipart_msg_latin1) + messages = self.db.issue.get('1', 'messages') + messages.sort() + msg = self.db.msg.getnode (messages[-1]) + self.assertEqual(len(msg.files), 1) + name = 'unnamed' + content = '' + c + '\n' + for n, id in enumerate (msg.files): + f = self.db.file.getnode (id) + self.assertEqual(f.name, name) + self.assertEqual(f.content, content) + self.assertEqual(msg.content, c) + self.compareMessages(self._get_mail(), +'''FROM: roundup-admin at your.tracker.email.domain.example +TO: chef at bork.bork.bork, richard at test.test +Content-Type: multipart/mixed; boundary="utf-8" +Subject: [issue1] Testing... 
+To: chef at bork.bork.bork, richard at test.test +From: "Contrary, Mary" +Reply-To: Roundup issue tracker + +MIME-Version: 1.0 +Message-Id: +In-Reply-To: +X-Roundup-Name: Roundup issue tracker +X-Roundup-Loop: hello +X-Roundup-Issue-Status: chatting +X-Roundup-Issue-Files: unnamed +Content-Transfer-Encoding: quoted-printable + + +--utf-8 +MIME-Version: 1.0 +Content-Type: text/plain; charset="utf-8" +Content-Transfer-Encoding: quoted-printable + + +Contrary, Mary added the comment: + +umlaut =C3=A4=C3=B6=C3=BC=C3=84=C3=96=C3=9C=C3=9F + +---------- +status: unread -> chatting + +_______________________________________________________________________ +Roundup issue tracker + +_______________________________________________________________________ +--utf-8 +Content-Type: text/html +MIME-Version: 1.0 +Content-Transfer-Encoding: base64 +Content-Disposition: attachment; + filename="unnamed" + +PGh0bWw+dW1sYXV0IMOkw7bDvMOEw5bDnMOfPC9odG1sPgo= + +--utf-8-- +''') + + def testMultipartCharsetLatin1AttachFile(self): + c = 'umlaut \xc3\xa4\xc3\xb6\xc3\xbc\xc3\x84\xc3\x96\xc3\x9c\xc3\x9f' + self.doNewIssue() + self.db.config.MAIL_CHARSET = 'iso-8859-1' + self._handle_mail(self.multipart_msg_latin1) + messages = self.db.issue.get('1', 'messages') + messages.sort() + msg = self.db.msg.getnode (messages[-1]) + self.assertEqual(len(msg.files), 1) + name = 'unnamed' + content = '' + c + '\n' + for n, id in enumerate (msg.files): + f = self.db.file.getnode (id) + self.assertEqual(f.name, name) + self.assertEqual(f.content, content) + self.assertEqual(msg.content, c) + self.compareMessages(self._get_mail(), +'''FROM: roundup-admin at your.tracker.email.domain.example +TO: chef at bork.bork.bork, richard at test.test +Content-Type: multipart/mixed; boundary="utf-8" +Subject: [issue1] Testing... 
+To: chef at bork.bork.bork, richard at test.test +From: "Contrary, Mary" +Reply-To: Roundup issue tracker + +MIME-Version: 1.0 +Message-Id: +In-Reply-To: +X-Roundup-Name: Roundup issue tracker +X-Roundup-Loop: hello +X-Roundup-Issue-Status: chatting +X-Roundup-Issue-Files: unnamed +Content-Transfer-Encoding: quoted-printable + + +--utf-8 +MIME-Version: 1.0 +Content-Type: text/plain; charset="iso-8859-1" +Content-Transfer-Encoding: quoted-printable + + +Contrary, Mary added the comment: + +umlaut =E4=F6=FC=C4=D6=DC=DF + +---------- +status: unread -> chatting + +_______________________________________________________________________ +Roundup issue tracker + +_______________________________________________________________________ +--utf-8 +Content-Type: text/html +MIME-Version: 1.0 +Content-Transfer-Encoding: base64 +Content-Disposition: attachment; + filename="unnamed" + +PGh0bWw+dW1sYXV0IMOkw7bDvMOEw5bDnMOfPC9odG1sPgo= + +--utf-8-- +''') + + def testMultipartRFC822(self): + self.doNewIssue() + self._handle_mail(self.multipart_msg_rfc822) + messages = self.db.issue.get('1', 'messages') + messages.sort() + msg = self.db.msg.getnode (messages[-1]) + self.assertEqual(len(msg.files), 1) + name = "Fwd: Original email subject.eml" + for n, id in enumerate (msg.files): + f = self.db.file.getnode (id) + self.assertEqual(f.name, name) + self.assertEqual(msg.content, 'First part: Text') + self.compareMessages(self._get_mail(), +'''TO: chef at bork.bork.bork, richard at test.test +Content-Type: text/plain; charset="utf-8" +Subject: [issue1] Testing... 
+To: chef at bork.bork.bork, richard at test.test +From: "Contrary, Mary" +Reply-To: Roundup issue tracker + +MIME-Version: 1.0 +Message-Id: +In-Reply-To: +X-Roundup-Name: Roundup issue tracker +X-Roundup-Loop: hello +X-Roundup-Issue-Status: chatting +X-Roundup-Issue-Files: Fwd: Original email subject.eml +Content-Transfer-Encoding: quoted-printable + + +--utf-8 +MIME-Version: 1.0 +Content-Type: text/plain; charset="utf-8" +Content-Transfer-Encoding: quoted-printable + + +Contrary, Mary added the comment: + +First part: Text + +---------- +status: unread -> chatting + +_______________________________________________________________________ +Roundup issue tracker + +_______________________________________________________________________ +--utf-8 +Content-Type: message/rfc822 +MIME-Version: 1.0 +Content-Disposition: attachment; + filename="Fwd: Original email subject.eml" + +Message-Id: +In-Reply-To: +MIME-Version: 1.0 +Subject: Fwd: Original email subject +Date: Mon, 23 Aug 2010 08:23:33 +0200 +Content-Type: multipart/alternative; boundary="090500050101020406060002" + +This is a multi-part message in MIME format. 
+--090500050101020406060002 +Content-Type: text/plain; charset=ISO-8859-15; format=flowed +Content-Transfer-Encoding: 7bit + +some text in inner email +======================== + +--090500050101020406060002 +Content-Type: text/html; charset=ISO-8859-15 +Content-Transfer-Encoding: 7bit + + +some text in inner email +======================== + + +--090500050101020406060002-- + +--utf-8-- +''') + + def testMultipartRFC822Unpack(self): self.doNewIssue() - self.db.config.MAILGW_IGNORE_ALTERNATIVES = True - self._handle_mail(self.multipart_msg) + self.db.config.MAILGW_UNPACK_RFC822 = True + self._handle_mail(self.multipart_msg_rfc822) messages = self.db.issue.get('1', 'messages') messages.sort() msg = self.db.msg.getnode (messages[-1]) - assert(len(msg.files) == 2) - names = {1 : 'second.dvi'} - content = {0 : 'test attachment third text/plain\n', - 1 : 'Just a test\n'} + self.assertEqual(len(msg.files), 2) + t = 'some text in inner email\n========================\n' + content = {0 : t, 1 : '\n' + t + '\n'} for n, id in enumerate (msg.files): f = self.db.file.getnode (id) - self.assertEqual(f.name, names.get (n, 'unnamed')) + self.assertEqual(f.name, 'unnamed') if n in content : self.assertEqual(f.content, content [n]) - self.assertEqual(msg.content, 'test attachment second text/plain') + self.assertEqual(msg.content, 'First part: Text') def testSimpleFollowup(self): self.doNewIssue() @@ -485,7 +1051,8 @@ Subject: [issue1] Testing... To: chef at bork.bork.bork, richard at test.test From: "Contrary, Mary" -Reply-To: Roundup issue tracker +Reply-To: Roundup issue tracker + MIME-Version: 1.0 Message-Id: In-Reply-To: @@ -533,7 +1100,112 @@ Subject: [issue1] Testing... 
To: chef at bork.bork.bork, john at test.test, mary at test.test From: richard -Reply-To: Roundup issue tracker +Reply-To: Roundup issue tracker + +MIME-Version: 1.0 +Message-Id: +In-Reply-To: +X-Roundup-Name: Roundup issue tracker +X-Roundup-Loop: hello +X-Roundup-Issue-Status: chatting +Content-Transfer-Encoding: quoted-printable + + +richard added the comment: + +This is a followup + +---------- +assignedto: -> mary +nosy: +john, mary +status: unread -> chatting + +_______________________________________________________________________ +Roundup issue tracker + +_______________________________________________________________________ +''') + + def testFollowupNoSubjectChange(self): + self.db.config.MAILGW_SUBJECT_UPDATES_TITLE = 'no' + self.doNewIssue() + + self._handle_mail('''Content-Type: text/plain; + charset="iso-8859-1" +From: richard +To: issue_tracker at your.tracker.email.domain.example +Message-Id: +In-Reply-To: +Subject: [issue1] Wrzlbrmft... [assignedto=mary; nosy=+john] + +This is a followup +''') + l = self.db.issue.get('1', 'nosy') + l.sort() + self.assertEqual(l, [self.chef_id, self.richard_id, self.mary_id, + self.john_id]) + + self.compareMessages(self._get_mail(), +'''FROM: roundup-admin at your.tracker.email.domain.example +TO: chef at bork.bork.bork, john at test.test, mary at test.test +Content-Type: text/plain; charset="utf-8" +Subject: [issue1] Testing... 
+To: chef at bork.bork.bork, john at test.test, mary at test.test +From: richard +Reply-To: Roundup issue tracker + +MIME-Version: 1.0 +Message-Id: +In-Reply-To: +X-Roundup-Name: Roundup issue tracker +X-Roundup-Loop: hello +X-Roundup-Issue-Status: chatting +Content-Transfer-Encoding: quoted-printable + + +richard added the comment: + +This is a followup + +---------- +assignedto: -> mary +nosy: +john, mary +status: unread -> chatting + +_______________________________________________________________________ +Roundup issue tracker + +_______________________________________________________________________ +''') + self.assertEqual(self.db.issue.get('1','title'), 'Testing...') + + def testFollowupExplicitSubjectChange(self): + self.doNewIssue() + + self._handle_mail('''Content-Type: text/plain; + charset="iso-8859-1" +From: richard +To: issue_tracker at your.tracker.email.domain.example +Message-Id: +In-Reply-To: +Subject: [issue1] Wrzlbrmft... [assignedto=mary; nosy=+john; title=new title] + +This is a followup +''') + l = self.db.issue.get('1', 'nosy') + l.sort() + self.assertEqual(l, [self.chef_id, self.richard_id, self.mary_id, + self.john_id]) + + self.compareMessages(self._get_mail(), +'''FROM: roundup-admin at your.tracker.email.domain.example +TO: chef at bork.bork.bork, john at test.test, mary at test.test +Content-Type: text/plain; charset="utf-8" +Subject: [issue1] new title +To: chef at bork.bork.bork, john at test.test, mary at test.test +From: richard +Reply-To: Roundup issue tracker + MIME-Version: 1.0 Message-Id: In-Reply-To: @@ -551,6 +1223,7 @@ assignedto: -> mary nosy: +john, mary status: unread -> chatting +title: Testing... 
-> new title _______________________________________________________________________ Roundup issue tracker @@ -558,6 +1231,49 @@ _______________________________________________________________________ ''') + def testNosyGeneration(self): + self.db.issue.create(title='test') + + # create a nosy message + msg = self.db.msg.create(content='This is a test', + author=self.richard_id, messageid='') + self.db.journaltag = 'richard' + l = self.db.issue.create(title='test', messages=[msg], + nosy=[self.chef_id, self.mary_id, self.john_id]) + + self.compareMessages(self._get_mail(), +'''FROM: roundup-admin at your.tracker.email.domain.example +TO: chef at bork.bork.bork, john at test.test, mary at test.test +Content-Type: text/plain; charset="utf-8" +Subject: [issue2] test +To: chef at bork.bork.bork, john at test.test, mary at test.test +From: richard +Reply-To: Roundup issue tracker + +MIME-Version: 1.0 +Message-Id: +X-Roundup-Name: Roundup issue tracker +X-Roundup-Loop: hello +X-Roundup-Issue-Status: unread +Content-Transfer-Encoding: quoted-printable + + +New submission from richard : + +This is a test + +---------- +messages: 1 +nosy: Chef, john, mary, richard +status: unread +title: test + +_______________________________________________________________________ +Roundup issue tracker + +_______________________________________________________________________ +''') + def testPropertyChangeOnly(self): self.doNewIssue() oldvalues = self.db.getnode('issue', '1').copy() @@ -565,7 +1281,7 @@ # reconstruct old behaviour: This would reuse the # database-handle from the doNewIssue above which has committed # as user "Chef". So we close and reopen the db as that user. 
- self.db.close() + #self.db.close() actually don't close 'cos this empties memorydb self.db = self.instance.open('Chef') self.db.issue.set('1', assignedto=self.chef_id) self.db.commit() @@ -590,8 +1306,10 @@ X-Roundup-Loop: hello X-Roundup-Issue-Status: unread X-Roundup-Version: 1.3.3 +In-Reply-To: MIME-Version: 1.0 -Reply-To: Roundup issue tracker +Reply-To: Roundup issue tracker + Content-Transfer-Encoding: quoted-printable @@ -629,7 +1347,8 @@ Subject: [issue1] Testing... To: chef at bork.bork.bork, john at test.test, mary at test.test From: richard -Reply-To: Roundup issue tracker +Reply-To: Roundup issue tracker + MIME-Version: 1.0 Message-Id: In-Reply-To: @@ -742,7 +1461,8 @@ Subject: [issue1] Testing... To: chef at bork.bork.bork, richard at test.test From: John Doe -Reply-To: Roundup issue tracker +Reply-To: Roundup issue tracker + MIME-Version: 1.0 Message-Id: In-Reply-To: @@ -788,7 +1508,8 @@ Subject: [issue1] Testing... To: chef at bork.bork.bork From: richard -Reply-To: Roundup issue tracker +Reply-To: Roundup issue tracker + MIME-Version: 1.0 Message-Id: In-Reply-To: @@ -834,7 +1555,8 @@ Subject: [issue1] Testing... To: chef at bork.bork.bork, john at test.test, richard at test.test From: John Doe -Reply-To: Roundup issue tracker +Reply-To: Roundup issue tracker + MIME-Version: 1.0 Message-Id: In-Reply-To: @@ -879,7 +1601,8 @@ Subject: [issue1] Testing... To: chef at bork.bork.bork, richard at test.test From: John Doe -Reply-To: Roundup issue tracker +Reply-To: Roundup issue tracker + MIME-Version: 1.0 Message-Id: In-Reply-To: @@ -924,7 +1647,8 @@ Subject: [issue1] Testing... 
To: chef at bork.bork.bork From: richard -Reply-To: Roundup issue tracker +Reply-To: Roundup issue tracker + MIME-Version: 1.0 Message-Id: In-Reply-To: @@ -1008,7 +1732,7 @@ assert not os.path.exists(SENDMAILDEBUG) def testNewUserAuthor(self): - + self.db.commit() l = self.db.user.list() l.sort() message = '''Content-Type: text/plain; @@ -1020,12 +1744,9 @@ This is a test submission of a new issue. ''' - def hook (db, **kw): - ''' set up callback for db open ''' - db.security.role['anonymous'].permissions=[] - anonid = db.user.lookup('anonymous') - db.user.set(anonid, roles='Anonymous') - self.instance.schema_hook = hook + self.db.security.role['anonymous'].permissions=[] + anonid = self.db.user.lookup('anonymous') + self.db.user.set(anonid, roles='Anonymous') try: self._handle_mail(message) except Unauthorized, value: @@ -1034,23 +1755,17 @@ Unknown address: fubar at bork.bork.bork """) - assert not body_diff, body_diff - else: raise AssertionError, "Unathorized not raised when handling mail" - - def hook (db, **kw): - ''' set up callback for db open ''' - # Add Web Access role to anonymous, and try again to make sure - # we get a "please register at:" message this time. - p = [ - db.security.getPermission('Create', 'user'), - db.security.getPermission('Web Access', None), - ] - db.security.role['anonymous'].permissions=p - self.instance.schema_hook = hook + # Add Web Access role to anonymous, and try again to make sure + # we get a "please register at:" message this time. 
+ p = [ + self.db.security.getPermission('Register', 'user'), + self.db.security.getPermission('Web Access', None), + ] + self.db.security.role['anonymous'].permissions=p try: self._handle_mail(message) except Unauthorized, value: @@ -1063,9 +1778,7 @@ Unknown address: fubar at bork.bork.bork """) - assert not body_diff, body_diff - else: raise AssertionError, "Unathorized not raised when handling mail" @@ -1074,21 +1787,18 @@ m.sort() self.assertEqual(l, m) - def hook (db, **kw): - ''' set up callback for db open ''' - # now with the permission - p = [ - db.security.getPermission('Create', 'user'), - db.security.getPermission('Email Access', None), - ] - db.security.role['anonymous'].permissions=p - self.instance.schema_hook = hook + # now with the permission + p = [ + self.db.security.getPermission('Register', 'user'), + self.db.security.getPermission('Email Access', None), + ] + self.db.security.role['anonymous'].permissions=p self._handle_mail(message) m = self.db.user.list() m.sort() self.assertNotEqual(l, m) - def testNewUserAuthorHighBit(self): + def testNewUserAuthorEncodedName(self): l = set(self.db.user.list()) # From: name has Euro symbol in it message = '''Content-Type: text/plain; @@ -1100,20 +1810,48 @@ This is a test submission of a new issue. 
''' - def hook (db, **kw): - ''' set up callback for db open ''' - p = [ - db.security.getPermission('Create', 'user'), - db.security.getPermission('Email Access', None), - ] - db.security.role['anonymous'].permissions=p - self.instance.schema_hook = hook + p = [ + self.db.security.getPermission('Register', 'user'), + self.db.security.getPermission('Email Access', None), + self.db.security.getPermission('Create', 'issue'), + self.db.security.getPermission('Create', 'msg'), + ] + self.db.security.role['anonymous'].permissions = p self._handle_mail(message) m = set(self.db.user.list()) new = list(m - l)[0] name = self.db.user.get(new, 'realname') self.assertEquals(name, 'H???llo') + def testNewUserAuthorMixedEncodedName(self): + l = set(self.db.user.list()) + # From: name has Euro symbol in it + message = '''Content-Type: text/plain; + charset="iso-8859-1" +From: Firstname =?utf-8?b?w6TDtsOf?= Last +To: issue_tracker at your.tracker.email.domain.example +Message-Id: +Subject: [issue] Test =?utf-8?b?w4TDlsOc?= umlauts + X1 + X2 + +This is a test submission of a new issue. +''' + p = [ + self.db.security.getPermission('Register', 'user'), + self.db.security.getPermission('Email Access', None), + self.db.security.getPermission('Create', 'issue'), + self.db.security.getPermission('Create', 'msg'), + ] + self.db.security.role['anonymous'].permissions = p + self._handle_mail(message) + title = self.db.issue.get('1', 'title') + self.assertEquals(title, 'Test \xc3\x84\xc3\x96\xc3\x9c umlauts X1 X2') + m = set(self.db.user.list()) + new = list(m - l)[0] + name = self.db.user.get(new, 'realname') + self.assertEquals(name, 'Firstname \xc3\xa4\xc3\xb6\xc3\x9f Last') + def testUnknownUser(self): l = set(self.db.user.list()) message = '''Content-Type: text/plain; @@ -1125,8 +1863,7 @@ This is a test submission of a new issue. 
''' - self.db.close() - handler = self.instance.MailGW(self.instance) + handler = self._create_mailgw(message) # we want a bounce message: handler.trapExceptions = 1 ret = handler.main(StringIO(message)) @@ -1153,7 +1890,11 @@ -You are not a registered user. +You are not a registered user. Please register at: + +http://tracker.example/cgi-bin/roundup.cgi/bugs/user?template=register + +...before sending mail to the tracker. Unknown address: nonexisting at bork.bork.bork @@ -1175,6 +1916,9 @@ ''') def testEnc01(self): + self.db.user.set(self.mary_id, + realname='\xe4\xf6\xfc\xc4\xd6\xdc\xdf, Mary'.decode + ('latin-1').encode('utf-8')) self.doNewIssue() self._handle_mail('''Content-Type: text/plain; charset="iso-8859-1" @@ -1196,8 +1940,10 @@ Content-Type: text/plain; charset="utf-8" Subject: [issue1] Testing... To: chef at bork.bork.bork, richard at test.test -From: "Contrary, Mary" -Reply-To: Roundup issue tracker +From: =?utf-8?b?w6TDtsO8w4TDlsOcw58sIE1hcnk=?= + +Reply-To: Roundup issue tracker + MIME-Version: 1.0 Message-Id: In-Reply-To: @@ -1207,7 +1953,8 @@ Content-Transfer-Encoding: quoted-printable -Contrary, Mary added the comment: +=C3=A4=C3=B6=C3=BC=C3=84=C3=96=C3=9C=C3=9F, Mary added the= + comment: A message with encoding (encoded oe =C3=B6) @@ -1244,7 +1991,8 @@ Subject: [issue1] Testing... To: chef at bork.bork.bork, richard at test.test From: "Contrary, Mary" -Reply-To: Roundup issue tracker +Reply-To: Roundup issue tracker + MIME-Version: 1.0 Message-Id: In-Reply-To: @@ -1298,7 +2046,8 @@ Subject: [issue1] Testing... To: chef at bork.bork.bork, richard at test.test From: "Contrary, Mary" -Reply-To: Roundup issue tracker +Reply-To: Roundup issue tracker + MIME-Version: 1.0 Message-Id: In-Reply-To: @@ -1375,7 +2124,8 @@ Subject: [issue1] Testing... 
To: chef at bork.bork.bork From: richard -Reply-To: Roundup issue tracker +Reply-To: Roundup issue tracker + MIME-Version: 1.0 Message-Id: In-Reply-To: @@ -1877,6 +2627,22 @@ assert not os.path.exists(SENDMAILDEBUG) self.assertEqual(self.db.keyword.get('1', 'name'), 'Bar') + def testOneCharSubject(self): + message = '''Content-Type: text/plain; + charset="iso-8859-1" +From: Chef +To: issue_tracker at your.tracker.email.domain.example +Subject: b +Cc: richard at test.test +Reply-To: chef at bork.bork.bork +Message-Id: + +''' + try: + self._handle_mail(message) + except MailUsageError: + self.fail('MailUsageError raised') + def testIssueidLast(self): nodeid1 = self.doNewIssue() nodeid2 = self._handle_mail('''Content-Type: text/plain; @@ -1893,6 +2659,290 @@ assert nodeid1 == nodeid2 self.assertEqual(self.db.issue.get(nodeid2, 'title'), "Testing...") + def testSecurityMessagePermissionContent(self): + id = self.doNewIssue() + issue = self.db.issue.getnode (id) + self.db.security.addRole(name='Nomsg') + self.db.security.addPermissionToRole('Nomsg', 'Email Access') + for cl in 'issue', 'file', 'keyword': + for p in 'View', 'Edit', 'Create': + self.db.security.addPermissionToRole('Nomsg', p, cl) + self.db.user.set(self.mary_id, roles='Nomsg') + nodeid = self._handle_mail('''Content-Type: text/plain; + charset="iso-8859-1" +From: Chef +To: issue_tracker at your.tracker.email.domain.example +Message-Id: +Subject: [issue%(id)s] Testing... [nosy=+mary] + +Just a test reply +'''%locals()) + assert os.path.exists(SENDMAILDEBUG) + self.compareMessages(self._get_mail(), +'''FROM: roundup-admin at your.tracker.email.domain.example +TO: chef at bork.bork.bork, richard at test.test +Content-Type: text/plain; charset="utf-8" +Subject: [issue1] Testing... 
+To: richard at test.test +From: "Bork, Chef" +Reply-To: Roundup issue tracker + +MIME-Version: 1.0 +Message-Id: +In-Reply-To: +X-Roundup-Name: Roundup issue tracker +X-Roundup-Loop: hello +X-Roundup-Issue-Status: chatting +Content-Transfer-Encoding: quoted-printable + + +Bork, Chef added the comment: + +Just a test reply + +---------- +nosy: +mary +status: unread -> chatting + +_______________________________________________________________________ +Roundup issue tracker + +_______________________________________________________________________ +''') + + def testOutlookAttachment(self): + message = '''X-MimeOLE: Produced By Microsoft Exchange V6.5 +Content-class: urn:content-classes:message +MIME-Version: 1.0 +Content-Type: multipart/mixed; + boundary="----_=_NextPart_001_01CACA65.40A51CBC" +Subject: Example of a failed outlook attachment e-mail +Date: Tue, 23 Mar 2010 01:43:44 -0700 +Message-ID: +X-MS-Has-Attach: yes +X-MS-TNEF-Correlator: +Thread-Topic: Example of a failed outlook attachment e-mail +Thread-Index: AcrKJo/t3pUBBwTpSwWNE3LE67UBDQ== +From: "Hugh" +To: +X-OriginalArrivalTime: 23 Mar 2010 08:45:57.0350 (UTC) FILETIME=[41893860:01CACA65] + +This is a multi-part message in MIME format. + +------_=_NextPart_001_01CACA65.40A51CBC +Content-Type: multipart/alternative; + boundary="----_=_NextPart_002_01CACA65.40A51CBC" + + +------_=_NextPart_002_01CACA65.40A51CBC +Content-Type: text/plain; + charset="us-ascii" +Content-Transfer-Encoding: quoted-printable + + +Hi Richard, + +I suppose this isn't the exact message that was sent but is a resend of +one of my trial messages that failed. For your benefit I changed the +subject line and am adding these words to the message body. Should +still be as problematic, but if you like I can resend an exact copy of a +failed message changing nothing except putting your address instead of +our tracker. + +Thanks very much for taking time to look into this. Much appreciated. 
+ + <>=20 + +------_=_NextPart_002_01CACA65.40A51CBC +Content-Type: text/html; + charset="us-ascii" +Content-Transfer-Encoding: quoted-printable + + + + + + +Example of a failed outlook attachment e-mail + + + +
    + +

    Hi Richard, +

    + +

    I suppose this isn't the exact message = +that was sent but is a resend of one of my trial messages that = +failed.  For your benefit I changed the subject line and am adding = +these words to the message body.  Should still be as problematic, = +but if you like I can resend an exact copy of a failed message changing = +nothing except putting your address instead of our tracker.

    + +

    Thanks very much for taking time to = +look into this.  Much appreciated. +

    +
    + +

    <<battery = +backup>> +

    + + + +------_=_NextPart_002_01CACA65.40A51CBC-- + +------_=_NextPart_001_01CACA65.40A51CBC +Content-Type: message/rfc822 +Content-Transfer-Encoding: 7bit + +X-MimeOLE: Produced By Microsoft Exchange V6.5 +MIME-Version: 1.0 +Content-Type: multipart/alternative; + boundary="----_=_NextPart_003_01CAC15A.29717800" +X-OriginalArrivalTime: 11 Mar 2010 20:33:51.0249 (UTC) FILETIME=[28FEE010:01CAC15A] +Content-class: urn:content-classes:message +Subject: battery backup +Date: Thu, 11 Mar 2010 13:33:43 -0700 +Message-ID: +X-MS-Has-Attach: +X-MS-TNEF-Correlator: +Thread-Topic: battery backup +Thread-Index: AcrBWimtulTrSvBdQ2CcfZ8lyQdxmQ== +From: "Jerry" +To: "Hugh" + +This is a multi-part message in MIME format. + +------_=_NextPart_003_01CAC15A.29717800 +Content-Type: text/plain; + charset="iso-8859-1" +Content-Transfer-Encoding: quoted-printable + +Dear Hugh, + A car batter has an energy capacity of ~ 500Wh. A UPS=20 +battery is worse than this. + +if we need to provied 100kW for 30 minutes that will take 100 car=20 +batteries. This seems like an awful lot of batteries. + +Of course I like your idea of making the time 1 minute, so we get to=20 +a more modest number of batteries + +Jerry + + +------_=_NextPart_003_01CAC15A.29717800 +Content-Type: text/html; + charset="iso-8859-1" +Content-Transfer-Encoding: quoted-printable + + + + + + +battery backup + + + + +

    Dear Hugh, + +
            A car = +batter has an energy capacity of ~ 500Wh.  A UPS + +
    battery is worse than this. +

    + +

    if we need to provied 100kW for 30 minutes that will = +take 100 car + +
    batteries.  This seems like an awful lot of = +batteries. +

    + +

    Of course I like your idea of making the time 1 = +minute, so we get to + +
    a more modest number of batteries +

    + +

    Jerry +

    + + + +------_=_NextPart_003_01CAC15A.29717800-- + +------_=_NextPart_001_01CACA65.40A51CBC-- +''' + nodeid = self._handle_mail(message) + assert not os.path.exists(SENDMAILDEBUG) + msgid = self.db.issue.get(nodeid, 'messages')[0] + self.assert_(self.db.msg.get(msgid, 'content').startswith('Hi Richard')) + self.assertEqual(self.db.msg.get(msgid, 'files'), ['1', '2']) + fileid = self.db.msg.get(msgid, 'files')[0] + self.assertEqual(self.db.file.get(fileid, 'type'), 'text/html') + fileid = self.db.msg.get(msgid, 'files')[1] + self.assertEqual(self.db.file.get(fileid, 'type'), 'message/rfc822') + + def testForwardedMessageAttachment(self): + message = '''Return-Path: +Received: from localhost(127.0.0.1), claiming to be "[115.130.26.69]" +via SMTP by localhost, id smtpdAAApLaWrq; Tue Apr 13 23:10:05 2010 +Message-ID: <4BC4F9C7.50409 at test.test> +Date: Wed, 14 Apr 2010 09:09:59 +1000 +From: Rupert Goldie +User-Agent: Thunderbird 2.0.0.24 (Windows/20100228) +MIME-Version: 1.0 +To: ekit issues +Subject: [Fwd: PHP ERROR (fb)] post limit reached +Content-Type: multipart/mixed; boundary="------------000807090608060304010403" + +This is a multi-part message in MIME format. +--------------000807090608060304010403 +Content-Type: text/plain; charset=ISO-8859-1; format=flowed +Content-Transfer-Encoding: 7bit + +Catch this exception and log it without emailing. 
+ +--------------000807090608060304010403 +Content-Type: message/rfc822; name="PHP ERROR (fb).eml" +Content-Transfer-Encoding: 7bit +Content-Disposition: inline; filename="PHP ERROR (fb).eml" + +Return-Path: +X-Sieve: CMU Sieve 2.2 +via SMTP by crown.off.ekorp.com, id smtpdAAA1JaW1o; Tue Apr 13 23:01:04 2010 +X-Virus-Scanned: by amavisd-new at ekit.com +To: facebook-errors at test.test +From: ektravj at test.test +Subject: PHP ERROR (fb) +Message-Id: <20100413230100.D601D27E84 at mail2.elax3.ekorp.com> +Date: Tue, 13 Apr 2010 23:01:00 +0000 (UTC) + +[13-Apr-2010 22:49:02] PHP Fatal error: Uncaught exception 'Exception' with message 'Facebook Error Message: Feed action request limit reached' in /app/01/www/virtual/fb.ekit.com/htdocs/includes/functions.php:280 +Stack trace: +#0 /app/01/www/virtual/fb.ekit.com/htdocs/gateway/ekit/feed/index.php(178): fb_exceptions(Object(FacebookRestClientException)) +#1 {main} + thrown in /app/01/www/virtual/fb.ekit.com/htdocs/includes/functions.php on line 280 + + +--------------000807090608060304010403-- +''' + nodeid = self._handle_mail(message) + assert not os.path.exists(SENDMAILDEBUG) + msgid = self.db.issue.get(nodeid, 'messages')[0] + self.assertEqual(self.db.msg.get(msgid, 'content'), + 'Catch this exception and log it without emailing.') + self.assertEqual(self.db.msg.get(msgid, 'files'), ['1']) + fileid = self.db.msg.get(msgid, 'files')[0] + self.assertEqual(self.db.file.get(fileid, 'type'), 'message/rfc822') def test_suite(): suite = unittest.TestSuite() @@ -1904,3 +2954,7 @@ unittest.main(testRunner=runner) # vim: set filetype=python sts=4 sw=4 et si : + + + + Added: tracker/roundup-src/test/test_memorydb.py ============================================================================== --- (empty file) +++ tracker/roundup-src/test/test_memorydb.py Thu Aug 4 15:46:52 2011 @@ -0,0 +1,71 @@ +# $Id: test_memorydb.py,v 1.4 2004-11-03 01:34:21 richard Exp $ + +import unittest, os, shutil, time + +from roundup import hyperdb + 
+from db_test_base import DBTest, ROTest, SchemaTest, config, setupSchema +import memorydb + +class memorydbOpener: + module = memorydb + + def nuke_database(self): + # really kill it + self.db = None + + db = None + def open_database(self): + if self.db is None: + self.db = self.module.Database(config, 'admin') + return self.db + + def setUp(self): + self.open_database() + setupSchema(self.db, 1, self.module) + + def tearDown(self): + if self.db is not None: + self.db.close() + + # nuke and re-create db for restore + def nukeAndCreate(self): + self.db.close() + self.nuke_database() + self.db = self.module.Database(config, 'admin') + setupSchema(self.db, 0, self.module) + +class memorydbDBTest(memorydbOpener, DBTest): + pass + +class memorydbROTest(memorydbOpener, ROTest): + def setUp(self): + self.db = self.module.Database(config) + setupSchema(self.db, 0, self.module) + +class memorydbSchemaTest(memorydbOpener, SchemaTest): + pass + +from session_common import DBMTest +class memorydbSessionTest(memorydbOpener, DBMTest): + def setUp(self): + self.db = self.module.Database(config, 'admin') + setupSchema(self.db, 1, self.module) + self.sessions = self.db.sessions + +def test_suite(): + suite = unittest.TestSuite() + print 'Including memorydb tests' + suite.addTest(unittest.makeSuite(memorydbDBTest)) + suite.addTest(unittest.makeSuite(memorydbROTest)) + suite.addTest(unittest.makeSuite(memorydbSchemaTest)) + suite.addTest(unittest.makeSuite(memorydbSessionTest)) + return suite + +if __name__ == '__main__': + runner = unittest.TextTestRunner() + unittest.main(testRunner=runner) + + +# vim: set filetype=python ts=4 sw=4 et si + Modified: tracker/roundup-src/test/test_multipart.py ============================================================================== --- tracker/roundup-src/test/test_multipart.py (original) +++ tracker/roundup-src/test/test_multipart.py Thu Aug 4 15:46:52 2011 @@ -23,13 +23,25 @@ from roundup.mailgw import Message class TestMessage(Message): + # 
A note on message/rfc822: The content of such an attachment is an + # email with at least one header line. RFC2046 tells us: """ A + # media type of "message/rfc822" indicates that the body contains an + # encapsulated message, with the syntax of an RFC 822 message. + # However, unlike top-level RFC 822 messages, the restriction that + # each "message/rfc822" body must include a "From", "Date", and at + # least one destination header is removed and replaced with the + # requirement that at least one of "From", "Subject", or "Date" must + # be present.""" + # This means we have to add a newline after the mime-header before + # the subject, otherwise the subject is part of the mime header not + # part of the email header. table = {'multipart/signed': ' boundary="boundary-%(indent)s";\n', 'multipart/mixed': ' boundary="boundary-%(indent)s";\n', 'multipart/alternative': ' boundary="boundary-%(indent)s";\n', 'text/plain': ' name="foo.txt"\nfoo\n', 'application/pgp-signature': ' name="foo.gpg"\nfoo\n', 'application/pdf': ' name="foo.pdf"\nfoo\n', - 'message/rfc822': 'Subject: foo\n\nfoo\n'} + 'message/rfc822': '\nSubject: foo\n\nfoo\n'} def __init__(self, spec): """Create a basic MIME message according to 'spec'. 
@@ -215,7 +227,7 @@ multipart/mixed message/rfc822""", (None, - [('foo', 'message/rfc822', 'foo\n')])) + [('foo.eml', 'message/rfc822', 'Subject: foo\n\nfoo\n')])) def test_suite(): suite = unittest.TestSuite() Modified: tracker/roundup-src/test/test_mysql.py ============================================================================== --- tracker/roundup-src/test/test_mysql.py (original) +++ tracker/roundup-src/test/test_mysql.py Thu Aug 4 15:46:52 2011 @@ -23,6 +23,7 @@ from roundup.backends import get_backend, have_backend from db_test_base import DBTest, ROTest, config, SchemaTest, ClassicInitTest +from db_test_base import ConcurrentDBTest, FilterCacheTest class mysqlOpener: @@ -63,6 +64,24 @@ ClassicInitTest.tearDown(self) self.nuke_database() +class mysqlConcurrencyTest(mysqlOpener, ConcurrentDBTest): + backend = 'mysql' + def setUp(self): + mysqlOpener.setUp(self) + ConcurrentDBTest.setUp(self) + def tearDown(self): + ConcurrentDBTest.tearDown(self) + self.nuke_database() + +class mysqlFilterCacheTest(mysqlOpener, FilterCacheTest): + backend = 'mysql' + def setUp(self): + mysqlOpener.setUp(self) + FilterCacheTest.setUp(self) + def tearDown(self): + FilterCacheTest.tearDown(self) + self.nuke_database() + from session_common import RDBMSTest class mysqlSessionTest(mysqlOpener, RDBMSTest): def setUp(self): @@ -92,6 +111,8 @@ suite.addTest(unittest.makeSuite(mysqlSchemaTest)) suite.addTest(unittest.makeSuite(mysqlClassicInitTest)) suite.addTest(unittest.makeSuite(mysqlSessionTest)) + suite.addTest(unittest.makeSuite(mysqlConcurrencyTest)) + suite.addTest(unittest.makeSuite(mysqlFilterCacheTest)) return suite if __name__ == '__main__': Modified: tracker/roundup-src/test/test_postgresql.py ============================================================================== --- tracker/roundup-src/test/test_postgresql.py (original) +++ tracker/roundup-src/test/test_postgresql.py Thu Aug 4 15:46:52 2011 @@ -22,6 +22,7 @@ from roundup.hyperdb import DatabaseError from 
db_test_base import DBTest, ROTest, config, SchemaTest, ClassicInitTest +from db_test_base import ConcurrentDBTest, FilterCacheTest from roundup.backends import get_backend, have_backend @@ -57,6 +58,26 @@ ROTest.tearDown(self) postgresqlOpener.tearDown(self) +class postgresqlConcurrencyTest(postgresqlOpener, ConcurrentDBTest): + backend = 'postgresql' + def setUp(self): + postgresqlOpener.setUp(self) + ConcurrentDBTest.setUp(self) + + def tearDown(self): + ConcurrentDBTest.tearDown(self) + postgresqlOpener.tearDown(self) + +class postgresqlFilterCacheTest(postgresqlOpener, FilterCacheTest): + backend = 'postgresql' + def setUp(self): + postgresqlOpener.setUp(self) + FilterCacheTest.setUp(self) + + def tearDown(self): + FilterCacheTest.tearDown(self) + postgresqlOpener.tearDown(self) + class postgresqlSchemaTest(postgresqlOpener, SchemaTest): def setUp(self): postgresqlOpener.setUp(self) @@ -102,6 +123,8 @@ suite.addTest(unittest.makeSuite(postgresqlSchemaTest)) suite.addTest(unittest.makeSuite(postgresqlClassicInitTest)) suite.addTest(unittest.makeSuite(postgresqlSessionTest)) + suite.addTest(unittest.makeSuite(postgresqlConcurrencyTest)) + suite.addTest(unittest.makeSuite(postgresqlFilterCacheTest)) return suite # vim: set et sts=4 sw=4 : Modified: tracker/roundup-src/test/test_security.py ============================================================================== --- tracker/roundup-src/test/test_security.py (original) +++ tracker/roundup-src/test/test_security.py Thu Aug 4 15:46:52 2011 @@ -23,7 +23,7 @@ import os, unittest, shutil from roundup import backends -from roundup.password import Password +import roundup.password from db_test_base import setupSchema, MyTestCase, config class PermissionTest(MyTestCase): @@ -178,6 +178,65 @@ self.assertEquals(has('Test', none, 'test', itemid='1'), 0) self.assertEquals(has('Test', none, 'test', itemid='2'), 0) + def testTransitiveSearchPermissions(self): + add = self.db.security.addPermission + has = 
self.db.security.hasSearchPermission + addRole = self.db.security.addRole + addToRole = self.db.security.addPermissionToRole + addRole(name='User') + addRole(name='Anonymous') + addRole(name='Issue') + addRole(name='Msg') + addRole(name='UV') + user = self.db.user.create(username='user1', roles='User') + anon = self.db.user.create(username='anonymous', roles='Anonymous') + ui = self.db.user.create(username='user2', roles='Issue') + uim = self.db.user.create(username='user3', roles='Issue,Msg') + uimu = self.db.user.create(username='user4', roles='Issue,Msg,UV') + iv = add(name="View", klass="issue") + addToRole('User', iv) + addToRole('Anonymous', iv) + addToRole('Issue', iv) + ms = add(name="Search", klass="msg") + addToRole('User', ms) + addToRole('Anonymous', ms) + addToRole('Msg', ms) + uv = add(name="View", klass="user") + addToRole('User', uv) + addToRole('UV', uv) + self.assertEquals(has(anon, 'issue', 'messages'), 1) + self.assertEquals(has(anon, 'issue', 'messages.author'), 0) + self.assertEquals(has(anon, 'issue', 'messages.author.username'), 0) + self.assertEquals(has(anon, 'issue', 'messages.recipients'), 0) + self.assertEquals(has(anon, 'issue', 'messages.recipients.username'), 0) + self.assertEquals(has(user, 'issue', 'messages'), 1) + self.assertEquals(has(user, 'issue', 'messages.author'), 1) + self.assertEquals(has(user, 'issue', 'messages.author.username'), 1) + self.assertEquals(has(user, 'issue', 'messages.recipients'), 1) + self.assertEquals(has(user, 'issue', 'messages.recipients.username'), 1) + + self.assertEquals(has(ui, 'issue', 'messages'), 0) + self.assertEquals(has(ui, 'issue', 'messages.author'), 0) + self.assertEquals(has(ui, 'issue', 'messages.author.username'), 0) + self.assertEquals(has(ui, 'issue', 'messages.recipients'), 0) + self.assertEquals(has(ui, 'issue', 'messages.recipients.username'), 0) + + self.assertEquals(has(uim, 'issue', 'messages'), 1) + self.assertEquals(has(uim, 'issue', 'messages.author'), 0) + 
self.assertEquals(has(uim, 'issue', 'messages.author.username'), 0) + self.assertEquals(has(uim, 'issue', 'messages.recipients'), 0) + self.assertEquals(has(uim, 'issue', 'messages.recipients.username'), 0) + + self.assertEquals(has(uimu, 'issue', 'messages'), 1) + self.assertEquals(has(uimu, 'issue', 'messages.author'), 1) + self.assertEquals(has(uimu, 'issue', 'messages.author.username'), 1) + self.assertEquals(has(uimu, 'issue', 'messages.recipients'), 1) + self.assertEquals(has(uimu, 'issue', 'messages.recipients.username'), 1) + + # roundup.password has its own built-in test, call it. + def test_password(self): + roundup.password.test() + def test_suite(): suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(PermissionTest)) Modified: tracker/roundup-src/test/test_sqlite.py ============================================================================== --- tracker/roundup-src/test/test_sqlite.py (original) +++ tracker/roundup-src/test/test_sqlite.py Thu Aug 4 15:46:52 2011 @@ -21,6 +21,7 @@ from roundup.backends import get_backend, have_backend from db_test_base import DBTest, ROTest, SchemaTest, ClassicInitTest, config +from db_test_base import ConcurrentDBTest, FilterCacheTest class sqliteOpener: if have_backend('sqlite'): @@ -41,6 +42,12 @@ class sqliteClassicInitTest(ClassicInitTest): backend = 'sqlite' +class sqliteConcurrencyTest(ConcurrentDBTest): + backend = 'sqlite' + +class sqliteFilterCacheTest(sqliteOpener, FilterCacheTest): + backend = 'sqlite' + from session_common import RDBMSTest class sqliteSessionTest(sqliteOpener, RDBMSTest): pass @@ -57,6 +64,8 @@ suite.addTest(unittest.makeSuite(sqliteSchemaTest)) suite.addTest(unittest.makeSuite(sqliteClassicInitTest)) suite.addTest(unittest.makeSuite(sqliteSessionTest)) + suite.addTest(unittest.makeSuite(sqliteConcurrencyTest)) + suite.addTest(unittest.makeSuite(sqliteFilterCacheTest)) return suite if __name__ == '__main__': Modified: tracker/roundup-src/test/test_templating.py 
============================================================================== --- tracker/roundup-src/test/test_templating.py (original) +++ tracker/roundup-src/test/test_templating.py Thu Aug 4 15:46:52 2011 @@ -147,6 +147,7 @@ p = StringHTMLProperty(self.client, 'test', '1', None, 'test', '') def t(s): return p.hyper_re.sub(p._hyper_repl, s) ae = self.assertEqual + ae(t('item123123123123'), 'item123123123123') ae(t('http://roundup.net/'), 'http://roundup.net/') ae(t('<HTTP://roundup.net/>'), Modified: tracker/roundup-src/test/test_xmlrpc.py ============================================================================== --- tracker/roundup-src/test/test_xmlrpc.py (original) +++ tracker/roundup-src/test/test_xmlrpc.py Thu Aug 4 15:46:52 2011 @@ -115,6 +115,88 @@ finally: self.db.setCurrentUser('joe') + def testAuthFilter(self): + # this checks if we properly check for search permissions + self.db.security.permissions = {} + self.db.security.addRole(name='User') + self.db.security.addRole(name='Project') + self.db.security.addPermissionToRole('User', 'Web Access') + self.db.security.addPermissionToRole('Project', 'Web Access') + # Allow viewing keyword + p = self.db.security.addPermission(name='View', klass='keyword') + self.db.security.addPermissionToRole('User', p) + # Allow viewing interesting things (but not keyword) on issue + # But users might only view issues where they are on nosy + # (so in the real world the check method would be better) + p = self.db.security.addPermission(name='View', klass='issue', + properties=("title", "status"), check=lambda x,y,z: True) + self.db.security.addPermissionToRole('User', p) + # Allow role "Project" access to whole issue + p = self.db.security.addPermission(name='View', klass='issue') + self.db.security.addPermissionToRole('Project', p) + # Allow all access to status: + p = self.db.security.addPermission(name='View', klass='status') + self.db.security.addPermissionToRole('User', p) + 
self.db.security.addPermissionToRole('Project', p) + + keyword = self.db.keyword + status = self.db.status + issue = self.db.issue + + d1 = keyword.create(name='d1') + d2 = keyword.create(name='d2') + open = status.create(name='open') + closed = status.create(name='closed') + issue.create(title='i1', status=open, keyword=[d2]) + issue.create(title='i2', status=open, keyword=[d1]) + issue.create(title='i2', status=closed, keyword=[d1]) + + chef = self.db.user.create(username = 'chef', roles='User, Project') + joe = self.db.user.lookup('joe') + + # Conditionally allow view of whole issue (check is False here, + # this might check for keyword owner in the real world) + p = self.db.security.addPermission(name='View', klass='issue', + check=lambda x,y,z: False) + self.db.security.addPermissionToRole('User', p) + # Allow user to search for issue.status + p = self.db.security.addPermission(name='Search', klass='issue', + properties=("status",)) + self.db.security.addPermissionToRole('User', p) + + keyw = {'keyword':self.db.keyword.lookup('d1')} + stat = {'status':self.db.status.lookup('open')} + keygroup = keysort = [('+', 'keyword')] + self.db.commit() + + # Filter on keyword ignored for role 'User': + r = self.server.filter('issue', None, keyw) + self.assertEqual(r, ['1', '2', '3']) + # Filter on status works for all: + r = self.server.filter('issue', None, stat) + self.assertEqual(r, ['1', '2']) + # Sorting and grouping for class User fails: + r = self.server.filter('issue', None, {}, sort=keysort) + self.assertEqual(r, ['1', '2', '3']) + r = self.server.filter('issue', None, {}, group=keygroup) + self.assertEqual(r, ['1', '2', '3']) + + self.db.close() + self.db = self.instance.open('chef') + self.server = RoundupInstance(self.db, self.instance.actions, None) + + # Filter on keyword works for role 'Project': + r = self.server.filter('issue', None, keyw) + self.assertEqual(r, ['2', '3']) + # Filter on status works for all: + r = self.server.filter('issue', None, stat) 
+ self.assertEqual(r, ['1', '2']) + # Sorting and grouping for class Project works: + r = self.server.filter('issue', None, {}, sort=keysort) + self.assertEqual(r, ['2', '3', '1']) + r = self.server.filter('issue', None, {}, group=keygroup) + self.assertEqual(r, ['2', '3', '1']) + def test_suite(): suite = unittest.TestSuite() for l in list_backends(): From python-checkins at python.org Thu Aug 4 18:10:10 2011 From: python-checkins at python.org (benjamin.peterson) Date: Thu, 04 Aug 2011 18:10:10 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_dosmodule_is=2C?= =?utf8?q?_thankfully=2C_no_more?= Message-ID: http://hg.python.org/cpython/rev/8a0f2887c6b7 changeset: 71740:8a0f2887c6b7 branch: 3.2 parent: 71738:4957131ad9dd user: Benjamin Peterson date: Thu Aug 04 11:07:42 2011 -0500 summary: dosmodule is, thankfully, no more files: Modules/posixmodule.c | 2 -- 1 files changed, 0 insertions(+), 2 deletions(-) diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -11,8 +11,6 @@ compiler is assumed to be IBM's VisualAge C++ (VACPP). PYCC_GCC is used as the compiler specific macro for the EMX port of gcc to OS/2. 
*/ -/* See also ../Dos/dosmodule.c */ - #ifdef __APPLE__ /* * Step 1 of support for weak-linking a number of symbols existing on -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 4 18:10:11 2011 From: python-checkins at python.org (benjamin.peterson) Date: Thu, 04 Aug 2011 18:10:11 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_dosmodule_is=2C?= =?utf8?q?_thankfully=2C_no_more?= Message-ID: http://hg.python.org/cpython/rev/5b7e765ce049 changeset: 71741:5b7e765ce049 branch: 2.7 parent: 71737:5a0726fcb18a user: Benjamin Peterson date: Thu Aug 04 11:07:42 2011 -0500 summary: dosmodule is, thankfully, no more files: Modules/posixmodule.c | 2 -- 1 files changed, 0 insertions(+), 2 deletions(-) diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -11,8 +11,6 @@ compiler is assumed to be IBM's VisualAge C++ (VACPP). PYCC_GCC is used as the compiler specific macro for the EMX port of gcc to OS/2. */ -/* See also ../Dos/dosmodule.c */ - #ifdef __APPLE__ /* * Step 1 of support for weak-linking a number of symbols existing on -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 4 18:10:13 2011 From: python-checkins at python.org (benjamin.peterson) Date: Thu, 04 Aug 2011 18:10:13 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_merge_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/ee0d1c6c2470 changeset: 71742:ee0d1c6c2470 parent: 71739:65c412586901 parent: 71740:8a0f2887c6b7 user: Benjamin Peterson date: Thu Aug 04 11:09:57 2011 -0500 summary: merge 3.2 files: Modules/posixmodule.c | 2 -- 1 files changed, 0 insertions(+), 2 deletions(-) diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -11,8 +11,6 @@ compiler is assumed to be IBM's VisualAge C++ (VACPP). 
PYCC_GCC is used as the compiler specific macro for the EMX port of gcc to OS/2. */ -/* See also ../Dos/dosmodule.c */ - #ifdef __APPLE__ /* * Step 1 of support for weak-linking a number of symbols existing on -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 5 02:45:41 2011 From: python-checkins at python.org (ezio.melotti) Date: Fri, 5 Aug 2011 02:45:41 +0200 (CEST) Subject: [Python-checkins] r88873 - tracker/instances/python-dev/extensions/openid_login.py Message-ID: <3RVTvT2CNVzMsQ@mail.python.org> Author: ezio.melotti Date: Fri Aug 5 02:45:41 2011 New Revision: 88873 Log: Try to fix a failure. Modified: tracker/instances/python-dev/extensions/openid_login.py Modified: tracker/instances/python-dev/extensions/openid_login.py ============================================================================== --- tracker/instances/python-dev/extensions/openid_login.py (original) +++ tracker/instances/python-dev/extensions/openid_login.py Fri Aug 5 02:45:41 2011 @@ -2,6 +2,7 @@ from roundup.cgi.actions import Action, LoginAction, RegisterAction from roundup.cgi.exceptions import * from roundup import date, password +from M2Crypto.SSL.Checker import NoCertificate providers = {} for p in ( @@ -126,7 +127,11 @@ self.client.error_message.append(self._('Unsupported provider')) return provider_id = providers[provider][2] - session = self.get_session(provider_id) + try: + session = self.get_session(provider_id) + except NoCertificate: + self.client.error_message.append(self._('Peer did not return certificate')) + return realm = self.base+"?@action=openid_return" return_to = realm + "&__came_from=%s" % urllib.quote(self.client.path) url = openid2rp.request_authentication(session.stypes, session.url, From solipsis at pitrou.net Fri Aug 5 05:25:06 2011 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Fri, 05 Aug 2011 05:25:06 +0200 Subject: [Python-checkins] Daily reference leaks (ee0d1c6c2470): sum=0 Message-ID: results for 
ee0d1c6c2470 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogWqep7q', '-x'] From python-checkins at python.org Fri Aug 5 07:44:26 2011 From: python-checkins at python.org (brett.cannon) Date: Fri, 05 Aug 2011 07:44:26 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Explicitly_close_a_file_to_?= =?utf8?q?stop_raising_a_ResourceWarning=2E?= Message-ID: http://hg.python.org/cpython/rev/7fb68a8d2509 changeset: 71743:7fb68a8d2509 user: Brett Cannon date: Thu Aug 04 21:34:52 2011 -0700 summary: Explicitly close a file to stop raising a ResourceWarning. files: Lib/test/test_sysconfig.py | 25 +++++++++++++------------ 1 files changed, 13 insertions(+), 12 deletions(-) diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py --- a/Lib/test/test_sysconfig.py +++ b/Lib/test/test_sysconfig.py @@ -306,19 +306,20 @@ env = os.environ.copy() env['MACOSX_DEPLOYMENT_TARGET'] = '10.1' - p = subprocess.Popen([ - sys.executable, '-c', - 'import sysconfig; print(sysconfig.get_platform())', - ], - stdout=subprocess.PIPE, - stderr=open('/dev/null'), - env=env) - test_platform = p.communicate()[0].strip() - test_platform = test_platform.decode('utf-8') - status = p.wait() + with open('/dev/null') as dev_null: + p = subprocess.Popen([ + sys.executable, '-c', + 'import sysconfig; print(sysconfig.get_platform())', + ], + stdout=subprocess.PIPE, + stderr=dev_null, + env=env) + test_platform = p.communicate()[0].strip() + test_platform = test_platform.decode('utf-8') + status = p.wait() - self.assertEqual(status, 0) - self.assertEqual(my_platform, test_platform) + self.assertEqual(status, 0) + self.assertEqual(my_platform, test_platform) class MakefileTests(unittest.TestCase): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 5 07:44:27 2011 From: python-checkins at python.org (brett.cannon) Date: Fri, 05 Aug 
2011 07:44:27 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Silence_altered_execution_s?= =?utf8?q?tate_warnings_from_test=5Ftelnetlib_involving_threads=2E?= Message-ID: http://hg.python.org/cpython/rev/e86a12167cf5 changeset: 71744:e86a12167cf5 user: Brett Cannon date: Thu Aug 04 22:37:55 2011 -0700 summary: Silence altered execution state warnings from test_telnetlib involving threads. files: Lib/test/test_telnetlib.py | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_telnetlib.py b/Lib/test/test_telnetlib.py --- a/Lib/test/test_telnetlib.py +++ b/Lib/test/test_telnetlib.py @@ -39,6 +39,7 @@ def tearDown(self): self.evt.wait() self.thread.join() + del self.thread # Clear out any dangling Thread objects. def testBasic(self): # connects -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 5 07:44:28 2011 From: python-checkins at python.org (brett.cannon) Date: Fri, 05 Aug 2011 07:44:28 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Silence_the_altered_state_w?= =?utf8?q?arning_about_threadgs_when_running_test=5Fhttplib=2E?= Message-ID: http://hg.python.org/cpython/rev/210d9a2dab09 changeset: 71745:210d9a2dab09 user: Brett Cannon date: Thu Aug 04 22:43:11 2011 -0700 summary: Silence the altered state warning about threadgs when running test_httplib. 
files: Lib/test/test_httplib.py | 8 ++++---- 1 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py --- a/Lib/test/test_httplib.py +++ b/Lib/test/test_httplib.py @@ -506,8 +506,7 @@ def test_local_good_hostname(self): # The (valid) cert validates the HTTP hostname import ssl - from test.ssl_servers import make_https_server - server = make_https_server(self, CERT_localhost) + server = self.make_server(CERT_localhost) context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) context.verify_mode = ssl.CERT_REQUIRED context.load_verify_locations(CERT_localhost) @@ -515,12 +514,12 @@ h.request('GET', '/nonexistent') resp = h.getresponse() self.assertEqual(resp.status, 404) + del server def test_local_bad_hostname(self): # The (valid) cert doesn't validate the HTTP hostname import ssl - from test.ssl_servers import make_https_server - server = make_https_server(self, CERT_fakehostname) + server = self.make_server(CERT_fakehostname) context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) context.verify_mode = ssl.CERT_REQUIRED context.load_verify_locations(CERT_fakehostname) @@ -538,6 +537,7 @@ h.request('GET', '/nonexistent') resp = h.getresponse() self.assertEqual(resp.status, 404) + del server class RequestBodyTest(TestCase): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 5 08:39:18 2011 From: python-checkins at python.org (ned.deily) Date: Fri, 05 Aug 2011 08:39:18 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzEyNTQw?= =?utf8?q?=3A_Prevent_zombie_IDLE_processes_on_Windows_due_to_changes?= Message-ID: http://hg.python.org/cpython/rev/cc86f4ca5020 changeset: 71746:cc86f4ca5020 branch: 3.2 parent: 71740:8a0f2887c6b7 user: Ned Deily date: Tue Aug 02 18:47:13 2011 -0700 summary: Issue #12540: Prevent zombie IDLE processes on Windows due to changes in os.kill(). Original patch by Eli Bendersky. 
files: Lib/idlelib/PyShell.py | 41 ++++++++++------------------- Misc/NEWS | 3 ++ 2 files changed, 18 insertions(+), 26 deletions(-) diff --git a/Lib/idlelib/PyShell.py b/Lib/idlelib/PyShell.py --- a/Lib/idlelib/PyShell.py +++ b/Lib/idlelib/PyShell.py @@ -10,6 +10,7 @@ import threading import traceback import types +import subprocess import linecache from code import InteractiveInterpreter @@ -37,11 +38,6 @@ HOST = '127.0.0.1' # python execution server on localhost loopback PORT = 0 # someday pass in host, port for remote debug capability -try: - from signal import SIGTERM -except ImportError: - SIGTERM = 15 - # Override warnings module to write to warning_stream. Initialize to send IDLE # internal warnings to the console. ScriptBinding.check_syntax() will # temporarily redirect the stream to the shell window to display warnings when @@ -344,13 +340,12 @@ self.port = PORT rpcclt = None - rpcpid = None + rpcsubproc = None def spawn_subprocess(self): if self.subprocess_arglist is None: self.subprocess_arglist = self.build_subprocess_arglist() - args = self.subprocess_arglist - self.rpcpid = os.spawnv(os.P_NOWAIT, sys.executable, args) + self.rpcsubproc = subprocess.Popen(self.subprocess_arglist) def build_subprocess_arglist(self): assert (self.port!=0), ( @@ -365,12 +360,7 @@ command = "__import__('idlelib.run').run.main(%r)" % (del_exitf,) else: command = "__import__('run').main(%r)" % (del_exitf,) - if sys.platform[:3] == 'win' and ' ' in sys.executable: - # handle embedded space in path by quoting the argument - decorated_exec = '"%s"' % sys.executable - else: - decorated_exec = sys.executable - return [decorated_exec] + w + ["-c", command, str(self.port)] + return [sys.executable] + w + ["-c", command, str(self.port)] def start_subprocess(self): addr = (HOST, self.port) @@ -428,7 +418,7 @@ pass # Kill subprocess, spawn a new one, accept connection. 
self.rpcclt.close() - self.unix_terminate() + self.terminate_subprocess() console = self.tkconsole was_executing = console.executing console.executing = False @@ -469,23 +459,22 @@ self.rpcclt.close() except AttributeError: # no socket pass - self.unix_terminate() + self.terminate_subprocess() self.tkconsole.executing = False self.rpcclt = None - def unix_terminate(self): - "UNIX: make sure subprocess is terminated and collect status" - if hasattr(os, 'kill'): + def terminate_subprocess(self): + "Make sure subprocess is terminated" + try: + self.rpcsubproc.kill() + except OSError: + # process already terminated + return + else: try: - os.kill(self.rpcpid, SIGTERM) + self.rpcsubproc.wait() except OSError: - # process already terminated: return - else: - try: - os.waitpid(self.rpcpid, 0) - except OSError: - return def transfer_path(self): self.runcommand("""if 1: diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -41,6 +41,9 @@ Library ------- +- Issue #12540: Prevent zombie IDLE processes on Windows due to changes + in os.kill(). + - Issue #12683: urlparse updated to include svn as schemes that uses relative paths. (svn from 1.5 onwards support relative path). -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 5 08:39:19 2011 From: python-checkins at python.org (ned.deily) Date: Fri, 05 Aug 2011 08:39:19 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Issue_=2312540=3A_Prevent_zombie_IDLE_processes_on_Windows_d?= =?utf8?q?ue_to_changes?= Message-ID: http://hg.python.org/cpython/rev/c2fd1ce1c6d4 changeset: 71747:c2fd1ce1c6d4 parent: 71745:210d9a2dab09 parent: 71746:cc86f4ca5020 user: Ned Deily date: Thu Aug 04 23:38:19 2011 -0700 summary: Issue #12540: Prevent zombie IDLE processes on Windows due to changes in os.kill(). Original patch by Eli Bendersky. 
files: Lib/idlelib/PyShell.py | 41 ++++++++++------------------- Misc/NEWS | 3 ++ 2 files changed, 18 insertions(+), 26 deletions(-) diff --git a/Lib/idlelib/PyShell.py b/Lib/idlelib/PyShell.py --- a/Lib/idlelib/PyShell.py +++ b/Lib/idlelib/PyShell.py @@ -10,6 +10,7 @@ import threading import traceback import types +import subprocess import linecache from code import InteractiveInterpreter @@ -37,11 +38,6 @@ HOST = '127.0.0.1' # python execution server on localhost loopback PORT = 0 # someday pass in host, port for remote debug capability -try: - from signal import SIGTERM -except ImportError: - SIGTERM = 15 - # Override warnings module to write to warning_stream. Initialize to send IDLE # internal warnings to the console. ScriptBinding.check_syntax() will # temporarily redirect the stream to the shell window to display warnings when @@ -344,13 +340,12 @@ self.port = PORT rpcclt = None - rpcpid = None + rpcsubproc = None def spawn_subprocess(self): if self.subprocess_arglist is None: self.subprocess_arglist = self.build_subprocess_arglist() - args = self.subprocess_arglist - self.rpcpid = os.spawnv(os.P_NOWAIT, sys.executable, args) + self.rpcsubproc = subprocess.Popen(self.subprocess_arglist) def build_subprocess_arglist(self): assert (self.port!=0), ( @@ -365,12 +360,7 @@ command = "__import__('idlelib.run').run.main(%r)" % (del_exitf,) else: command = "__import__('run').main(%r)" % (del_exitf,) - if sys.platform[:3] == 'win' and ' ' in sys.executable: - # handle embedded space in path by quoting the argument - decorated_exec = '"%s"' % sys.executable - else: - decorated_exec = sys.executable - return [decorated_exec] + w + ["-c", command, str(self.port)] + return [sys.executable] + w + ["-c", command, str(self.port)] def start_subprocess(self): addr = (HOST, self.port) @@ -428,7 +418,7 @@ pass # Kill subprocess, spawn a new one, accept connection. 
self.rpcclt.close() - self.unix_terminate() + self.terminate_subprocess() console = self.tkconsole was_executing = console.executing console.executing = False @@ -469,23 +459,22 @@ self.rpcclt.close() except AttributeError: # no socket pass - self.unix_terminate() + self.terminate_subprocess() self.tkconsole.executing = False self.rpcclt = None - def unix_terminate(self): - "UNIX: make sure subprocess is terminated and collect status" - if hasattr(os, 'kill'): + def terminate_subprocess(self): + "Make sure subprocess is terminated" + try: + self.rpcsubproc.kill() + except OSError: + # process already terminated + return + else: try: - os.kill(self.rpcpid, SIGTERM) + self.rpcsubproc.wait() except OSError: - # process already terminated: return - else: - try: - os.waitpid(self.rpcpid, 0) - except OSError: - return def transfer_path(self): self.runcommand("""if 1: diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -249,6 +249,9 @@ Library ------- +- Issue #12540: Prevent zombie IDLE processes on Windows due to changes + in os.kill(). + - Issue #12683: urlparse updated to include svn as schemes that uses relative paths. (svn from 1.5 onwards support relative path). 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 5 17:48:43 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 05 Aug 2011 17:48:43 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_wrong_use_of_root_logge?= =?utf8?q?r_in_packaging_=28also_a_NameError=29?= Message-ID: http://hg.python.org/cpython/rev/0c1c9bb590a9 changeset: 71748:0c1c9bb590a9 user: ?ric Araujo date: Thu Aug 04 17:17:07 2011 +0200 summary: Fix wrong use of root logger in packaging (also a NameError) files: Lib/packaging/util.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/packaging/util.py b/Lib/packaging/util.py --- a/Lib/packaging/util.py +++ b/Lib/packaging/util.py @@ -782,7 +782,7 @@ """ logger.debug('spawn: running %r', cmd) if dry_run: - logging.debug('dry run, no process actually spawned') + logger.debug('dry run, no process actually spawned') return if sys.platform == 'darwin': global _cfg_target, _cfg_target_split -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 5 23:06:20 2011 From: python-checkins at python.org (sandro.tosi) Date: Fri, 05 Aug 2011 23:06:20 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_=2311572=3A_improvements_to?= =?utf8?q?_copy_module_tests_along_with_removal_of_old_test_suite?= Message-ID: http://hg.python.org/cpython/rev/74e79b2c114a changeset: 71749:74e79b2c114a user: Sandro Tosi date: Fri Aug 05 23:05:35 2011 +0200 summary: #11572: improvements to copy module tests along with removal of old test suite files: Lib/copy.py | 65 ----------- Lib/test/test_copy.py | 168 ++++++++++++++++------------- 2 files changed, 95 insertions(+), 138 deletions(-) diff --git a/Lib/copy.py b/Lib/copy.py --- a/Lib/copy.py +++ b/Lib/copy.py @@ -323,68 +323,3 @@ # Helper for instance creation without calling __init__ class _EmptyClass: pass - -def _test(): - l = [None, 1, 2, 3.14, 'xyzzy', (1, 2), [3.14, 'abc'], - {'abc': 'ABC'}, (), [], {}] - l1 = 
copy(l) - print(l1==l) - l1 = map(copy, l) - print(l1==l) - l1 = deepcopy(l) - print(l1==l) - class C: - def __init__(self, arg=None): - self.a = 1 - self.arg = arg - if __name__ == '__main__': - import sys - file = sys.argv[0] - else: - file = __file__ - self.fp = open(file) - self.fp.close() - def __getstate__(self): - return {'a': self.a, 'arg': self.arg} - def __setstate__(self, state): - for key, value in state.items(): - setattr(self, key, value) - def __deepcopy__(self, memo=None): - new = self.__class__(deepcopy(self.arg, memo)) - new.a = self.a - return new - c = C('argument sketch') - l.append(c) - l2 = copy(l) - print(l == l2) - print(l) - print(l2) - l2 = deepcopy(l) - print(l == l2) - print(l) - print(l2) - l.append({l[1]: l, 'xyz': l[2]}) - l3 = copy(l) - import reprlib - print(map(reprlib.repr, l)) - print(map(reprlib.repr, l1)) - print(map(reprlib.repr, l2)) - print(map(reprlib.repr, l3)) - l3 = deepcopy(l) - print(map(reprlib.repr, l)) - print(map(reprlib.repr, l1)) - print(map(reprlib.repr, l2)) - print(map(reprlib.repr, l3)) - class odict(dict): - def __init__(self, d = {}): - self.a = 99 - dict.__init__(self, d) - def __setitem__(self, k, i): - dict.__setitem__(self, k, i) - self.a - o = odict({"A" : "B"}) - x = deepcopy(o) - print(o, x) - -if __name__ == '__main__': - _test() diff --git a/Lib/test/test_copy.py b/Lib/test/test_copy.py --- a/Lib/test/test_copy.py +++ b/Lib/test/test_copy.py @@ -17,7 +17,7 @@ # Attempt full line coverage of copy.py from top to bottom def test_exceptions(self): - self.assertTrue(copy.Error is copy.error) + self.assertIs(copy.Error, copy.error) self.assertTrue(issubclass(copy.Error, Exception)) # The copy() method @@ -54,20 +54,26 @@ def test_copy_reduce_ex(self): class C(object): def __reduce_ex__(self, proto): + c.append(1) return "" def __reduce__(self): - raise support.TestFailed("shouldn't call this") + self.fail("shouldn't call this") + c = [] x = C() y = copy.copy(x) - self.assertTrue(y is x) + 
self.assertIs(y, x) + self.assertEqual(c, [1]) def test_copy_reduce(self): class C(object): def __reduce__(self): + c.append(1) return "" + c = [] x = C() y = copy.copy(x) - self.assertTrue(y is x) + self.assertIs(y, x) + self.assertEqual(c, [1]) def test_copy_cant(self): class C(object): @@ -91,7 +97,7 @@ "hello", "hello\u1234", f.__code__, NewStyle, range(10), Classic, max] for x in tests: - self.assertTrue(copy.copy(x) is x, repr(x)) + self.assertIs(copy.copy(x), x) def test_copy_list(self): x = [1, 2, 3] @@ -185,9 +191,9 @@ x = [x, x] y = copy.deepcopy(x) self.assertEqual(y, x) - self.assertTrue(y is not x) - self.assertTrue(y[0] is not x[0]) - self.assertTrue(y[0] is y[1]) + self.assertIsNot(y, x) + self.assertIsNot(y[0], x[0]) + self.assertIs(y[0], y[1]) def test_deepcopy_issubclass(self): # XXX Note: there's no way to test the TypeError coming out of @@ -227,20 +233,26 @@ def test_deepcopy_reduce_ex(self): class C(object): def __reduce_ex__(self, proto): + c.append(1) return "" def __reduce__(self): - raise support.TestFailed("shouldn't call this") + self.fail("shouldn't call this") + c = [] x = C() y = copy.deepcopy(x) - self.assertTrue(y is x) + self.assertIs(y, x) + self.assertEqual(c, [1]) def test_deepcopy_reduce(self): class C(object): def __reduce__(self): + c.append(1) return "" + c = [] x = C() y = copy.deepcopy(x) - self.assertTrue(y is x) + self.assertIs(y, x) + self.assertEqual(c, [1]) def test_deepcopy_cant(self): class C(object): @@ -264,14 +276,14 @@ "hello", "hello\u1234", f.__code__, NewStyle, range(10), Classic, max] for x in tests: - self.assertTrue(copy.deepcopy(x) is x, repr(x)) + self.assertIs(copy.deepcopy(x), x) def test_deepcopy_list(self): x = [[1, 2], 3] y = copy.deepcopy(x) self.assertEqual(y, x) - self.assertTrue(x is not y) - self.assertTrue(x[0] is not y[0]) + self.assertIsNot(x, y) + self.assertIsNot(x[0], y[0]) def test_deepcopy_reflexive_list(self): x = [] @@ -279,16 +291,26 @@ y = copy.deepcopy(x) for op in comparisons: 
self.assertRaises(RuntimeError, op, y, x) - self.assertTrue(y is not x) - self.assertTrue(y[0] is y) + self.assertIsNot(y, x) + self.assertIs(y[0], y) self.assertEqual(len(y), 1) + def test_deepcopy_empty_tuple(self): + x = () + y = copy.deepcopy(x) + self.assertIs(x, y) + def test_deepcopy_tuple(self): x = ([1, 2], 3) y = copy.deepcopy(x) self.assertEqual(y, x) - self.assertTrue(x is not y) - self.assertTrue(x[0] is not y[0]) + self.assertIsNot(x, y) + self.assertIsNot(x[0], y[0]) + + def test_deepcopy_tuple_of_immutables(self): + x = ((1, 2), 3) + y = copy.deepcopy(x) + self.assertIs(x, y) def test_deepcopy_reflexive_tuple(self): x = ([],) @@ -296,16 +318,16 @@ y = copy.deepcopy(x) for op in comparisons: self.assertRaises(RuntimeError, op, y, x) - self.assertTrue(y is not x) - self.assertTrue(y[0] is not x[0]) - self.assertTrue(y[0][0] is y) + self.assertIsNot(y, x) + self.assertIsNot(y[0], x[0]) + self.assertIs(y[0][0], y) def test_deepcopy_dict(self): x = {"foo": [1, 2], "bar": 3} y = copy.deepcopy(x) self.assertEqual(y, x) - self.assertTrue(x is not y) - self.assertTrue(x["foo"] is not y["foo"]) + self.assertIsNot(x, y) + self.assertIsNot(x["foo"], y["foo"]) def test_deepcopy_reflexive_dict(self): x = {} @@ -315,8 +337,8 @@ self.assertRaises(TypeError, op, y, x) for op in equality_comparisons: self.assertRaises(RuntimeError, op, y, x) - self.assertTrue(y is not x) - self.assertTrue(y['foo'] is y) + self.assertIsNot(y, x) + self.assertIs(y['foo'], y) self.assertEqual(len(y), 1) def test_deepcopy_keepalive(self): @@ -349,7 +371,7 @@ x = C([42]) y = copy.deepcopy(x) self.assertEqual(y, x) - self.assertTrue(y.foo is not x.foo) + self.assertIsNot(y.foo, x.foo) def test_deepcopy_inst_deepcopy(self): class C: @@ -362,8 +384,8 @@ x = C([42]) y = copy.deepcopy(x) self.assertEqual(y, x) - self.assertTrue(y is not x) - self.assertTrue(y.foo is not x.foo) + self.assertIsNot(y, x) + self.assertIsNot(y.foo, x.foo) def test_deepcopy_inst_getinitargs(self): class C: @@ -376,8 
+398,8 @@ x = C([42]) y = copy.deepcopy(x) self.assertEqual(y, x) - self.assertTrue(y is not x) - self.assertTrue(y.foo is not x.foo) + self.assertIsNot(y, x) + self.assertIsNot(y.foo, x.foo) def test_deepcopy_inst_getstate(self): class C: @@ -390,8 +412,8 @@ x = C([42]) y = copy.deepcopy(x) self.assertEqual(y, x) - self.assertTrue(y is not x) - self.assertTrue(y.foo is not x.foo) + self.assertIsNot(y, x) + self.assertIsNot(y.foo, x.foo) def test_deepcopy_inst_setstate(self): class C: @@ -404,8 +426,8 @@ x = C([42]) y = copy.deepcopy(x) self.assertEqual(y, x) - self.assertTrue(y is not x) - self.assertTrue(y.foo is not x.foo) + self.assertIsNot(y, x) + self.assertIsNot(y.foo, x.foo) def test_deepcopy_inst_getstate_setstate(self): class C: @@ -420,8 +442,8 @@ x = C([42]) y = copy.deepcopy(x) self.assertEqual(y, x) - self.assertTrue(y is not x) - self.assertTrue(y.foo is not x.foo) + self.assertIsNot(y, x) + self.assertIsNot(y.foo, x.foo) def test_deepcopy_reflexive_inst(self): class C: @@ -429,8 +451,8 @@ x = C() x.foo = x y = copy.deepcopy(x) - self.assertTrue(y is not x) - self.assertTrue(y.foo is y) + self.assertIsNot(y, x) + self.assertIs(y.foo, y) # _reconstruct() @@ -440,9 +462,9 @@ return "" x = C() y = copy.copy(x) - self.assertTrue(y is x) + self.assertIs(y, x) y = copy.deepcopy(x) - self.assertTrue(y is x) + self.assertIs(y, x) def test_reconstruct_nostate(self): class C(object): @@ -451,9 +473,9 @@ x = C() x.foo = 42 y = copy.copy(x) - self.assertTrue(y.__class__ is x.__class__) + self.assertIs(y.__class__, x.__class__) y = copy.deepcopy(x) - self.assertTrue(y.__class__ is x.__class__) + self.assertIs(y.__class__, x.__class__) def test_reconstruct_state(self): class C(object): @@ -467,7 +489,7 @@ self.assertEqual(y, x) y = copy.deepcopy(x) self.assertEqual(y, x) - self.assertTrue(y.foo is not x.foo) + self.assertIsNot(y.foo, x.foo) def test_reconstruct_state_setstate(self): class C(object): @@ -483,7 +505,7 @@ self.assertEqual(y, x) y = copy.deepcopy(x) 
self.assertEqual(y, x) - self.assertTrue(y.foo is not x.foo) + self.assertIsNot(y.foo, x.foo) def test_reconstruct_reflexive(self): class C(object): @@ -491,8 +513,8 @@ x = C() x.foo = x y = copy.deepcopy(x) - self.assertTrue(y is not x) - self.assertTrue(y.foo is y) + self.assertIsNot(y, x) + self.assertIs(y.foo, y) # Additions for Python 2.3 and pickle protocol 2 @@ -506,12 +528,12 @@ x = C([[1, 2], 3]) y = copy.copy(x) self.assertEqual(x, y) - self.assertTrue(x is not y) - self.assertTrue(x[0] is y[0]) + self.assertIsNot(x, y) + self.assertIs(x[0], y[0]) y = copy.deepcopy(x) self.assertEqual(x, y) - self.assertTrue(x is not y) - self.assertTrue(x[0] is not y[0]) + self.assertIsNot(x, y) + self.assertIsNot(x[0], y[0]) def test_reduce_5tuple(self): class C(dict): @@ -523,12 +545,12 @@ x = C([("foo", [1, 2]), ("bar", 3)]) y = copy.copy(x) self.assertEqual(x, y) - self.assertTrue(x is not y) - self.assertTrue(x["foo"] is y["foo"]) + self.assertIsNot(x, y) + self.assertIs(x["foo"], y["foo"]) y = copy.deepcopy(x) self.assertEqual(x, y) - self.assertTrue(x is not y) - self.assertTrue(x["foo"] is not y["foo"]) + self.assertIsNot(x, y) + self.assertIsNot(x["foo"], y["foo"]) def test_copy_slots(self): class C(object): @@ -536,7 +558,7 @@ x = C() x.foo = [42] y = copy.copy(x) - self.assertTrue(x.foo is y.foo) + self.assertIs(x.foo, y.foo) def test_deepcopy_slots(self): class C(object): @@ -545,7 +567,7 @@ x.foo = [42] y = copy.deepcopy(x) self.assertEqual(x.foo, y.foo) - self.assertTrue(x.foo is not y.foo) + self.assertIsNot(x.foo, y.foo) def test_deepcopy_dict_subclass(self): class C(dict): @@ -562,7 +584,7 @@ y = copy.deepcopy(x) self.assertEqual(x, y) self.assertEqual(x._keys, y._keys) - self.assertTrue(x is not y) + self.assertIsNot(x, y) x['bar'] = 1 self.assertNotEqual(x, y) self.assertNotEqual(x._keys, y._keys) @@ -575,8 +597,8 @@ y = copy.copy(x) self.assertEqual(list(x), list(y)) self.assertEqual(x.foo, y.foo) - self.assertTrue(x[0] is y[0]) - 
self.assertTrue(x.foo is y.foo) + self.assertIs(x[0], y[0]) + self.assertIs(x.foo, y.foo) def test_deepcopy_list_subclass(self): class C(list): @@ -586,8 +608,8 @@ y = copy.deepcopy(x) self.assertEqual(list(x), list(y)) self.assertEqual(x.foo, y.foo) - self.assertTrue(x[0] is not y[0]) - self.assertTrue(x.foo is not y.foo) + self.assertIsNot(x[0], y[0]) + self.assertIsNot(x.foo, y.foo) def test_copy_tuple_subclass(self): class C(tuple): @@ -604,8 +626,8 @@ self.assertEqual(tuple(x), ([1, 2], 3)) y = copy.deepcopy(x) self.assertEqual(tuple(y), ([1, 2], 3)) - self.assertTrue(x is not y) - self.assertTrue(x[0] is not y[0]) + self.assertIsNot(x, y) + self.assertIsNot(x[0], y[0]) def test_getstate_exc(self): class EvilState(object): @@ -633,10 +655,10 @@ obj = C() x = weakref.ref(obj) y = _copy(x) - self.assertTrue(y is x) + self.assertIs(y, x) del obj y = _copy(x) - self.assertTrue(y is x) + self.assertIs(y, x) def test_copy_weakref(self): self._check_weakref(copy.copy) @@ -652,7 +674,7 @@ u[a] = b u[c] = d v = copy.copy(u) - self.assertFalse(v is u) + self.assertIsNot(v, u) self.assertEqual(v, u) self.assertEqual(v[a], b) self.assertEqual(v[c], d) @@ -682,8 +704,8 @@ v = copy.deepcopy(u) self.assertNotEqual(v, u) self.assertEqual(len(v), 2) - self.assertFalse(v[a] is b) - self.assertFalse(v[c] is d) + self.assertIsNot(v[a], b) + self.assertIsNot(v[c], d) self.assertEqual(v[a].i, b.i) self.assertEqual(v[c].i, d.i) del c @@ -702,12 +724,12 @@ self.assertNotEqual(v, u) self.assertEqual(len(v), 2) (x, y), (z, t) = sorted(v.items(), key=lambda pair: pair[0].i) - self.assertFalse(x is a) + self.assertIsNot(x, a) self.assertEqual(x.i, a.i) - self.assertTrue(y is b) - self.assertFalse(z is c) + self.assertIs(y, b) + self.assertIsNot(z, c) self.assertEqual(z.i, c.i) - self.assertTrue(t is d) + self.assertIs(t, d) del x, y, z, t del d self.assertEqual(len(v), 1) @@ -720,7 +742,7 @@ f.b = f.m g = copy.deepcopy(f) self.assertEqual(g.m, g.b) - self.assertTrue(g.b.__self__ is g) + 
self.assertIs(g.b.__self__, g) g.b() -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Sat Aug 6 05:26:32 2011 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Sat, 06 Aug 2011 05:26:32 +0200 Subject: [Python-checkins] Daily reference leaks (74e79b2c114a): sum=0 Message-ID: results for 74e79b2c114a on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogtarUb1', '-x'] From python-checkins at python.org Sat Aug 6 06:28:52 2011 From: python-checkins at python.org (senthil.kumaran) Date: Sat, 06 Aug 2011 06:28:52 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Fix_closes_issu?= =?utf8?q?e12698_-_make_the_no=5Fproxy_environment_variable_handling_a_bit?= Message-ID: http://hg.python.org/cpython/rev/c5a35bcfa3ee changeset: 71750:c5a35bcfa3ee branch: 2.7 parent: 71741:5b7e765ce049 user: Senthil Kumaran date: Sat Aug 06 12:24:33 2011 +0800 summary: Fix closes issue12698 - make the no_proxy environment variable handling a bit lenient (accomodate spaces in between the items) files: Lib/test/test_urllib.py | 3 +++ Lib/urllib.py | 3 ++- 2 files changed, 5 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py --- a/Lib/test/test_urllib.py +++ b/Lib/test/test_urllib.py @@ -114,6 +114,9 @@ proxies = urllib.getproxies_environment() # getproxies_environment use lowered case truncated (no '_proxy') keys self.assertEqual('localhost', proxies['no']) + # List of no_proxies with space. 
+ self.env.set('NO_PROXY', 'localhost, anotherdomain.com, newdomain.com') + self.assertTrue(urllib.proxy_bypass_environment('anotherdomain.com')) class urlopen_HttpTests(unittest.TestCase): diff --git a/Lib/urllib.py b/Lib/urllib.py --- a/Lib/urllib.py +++ b/Lib/urllib.py @@ -1366,7 +1366,8 @@ # strip port off host hostonly, port = splitport(host) # check if the host ends with any of the DNS suffixes - for name in no_proxy.split(','): + no_proxy_list = [proxy.strip() for proxy in no_proxy.split(',')] + for name in no_proxy_list: if name and (hostonly.endswith(name) or host.endswith(name)): return 1 # otherwise, don't bypass -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 6 06:28:55 2011 From: python-checkins at python.org (senthil.kumaran) Date: Sat, 06 Aug 2011 06:28:55 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Fix_closes_issu?= =?utf8?q?e12698_-_make_the_no=5Fproxy_environment_variable_handling_a_bit?= Message-ID: http://hg.python.org/cpython/rev/1d4bd059a9b6 changeset: 71751:1d4bd059a9b6 branch: 3.2 parent: 71746:cc86f4ca5020 user: Senthil Kumaran date: Sat Aug 06 12:27:40 2011 +0800 summary: Fix closes issue12698 - make the no_proxy environment variable handling a bit lenient (accomodate spaces in between the items) files: Lib/test/test_urllib.py | 4 +++- Lib/urllib/request.py | 3 ++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py --- a/Lib/test/test_urllib.py +++ b/Lib/test/test_urllib.py @@ -135,7 +135,9 @@ proxies = urllib.request.getproxies_environment() # getproxies_environment use lowered case truncated (no '_proxy') keys self.assertEqual('localhost', proxies['no']) - + # List of no_proxies with space. 
+ self.env.set('NO_PROXY', 'localhost, anotherdomain.com, newdomain.com') + self.assertTrue(urllib.request.proxy_bypass_environment('anotherdomain.com')) class urlopen_HttpTests(unittest.TestCase): """Test urlopen() opening a fake http connection.""" diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py --- a/Lib/urllib/request.py +++ b/Lib/urllib/request.py @@ -2265,7 +2265,8 @@ # strip port off host hostonly, port = splitport(host) # check if the host ends with any of the DNS suffixes - for name in no_proxy.split(','): + no_proxy_list = [proxy.strip() for proxy in no_proxy.split(',')] + for name in no_proxy_list: if name and (hostonly.endswith(name) or host.endswith(name)): return 1 # otherwise, don't bypass -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 6 06:28:58 2011 From: python-checkins at python.org (senthil.kumaran) Date: Sat, 06 Aug 2011 06:28:58 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_merge_from_3=2E2_-_Fix_closes_issue12698_-_make_the_no=5Fpro?= =?utf8?q?xy_environment_variable?= Message-ID: http://hg.python.org/cpython/rev/47573019bfc8 changeset: 71752:47573019bfc8 parent: 71749:74e79b2c114a parent: 71751:1d4bd059a9b6 user: Senthil Kumaran date: Sat Aug 06 12:28:16 2011 +0800 summary: merge from 3.2 - Fix closes issue12698 - make the no_proxy environment variable handling a bit lenient (accomodate spaces in between the items) files: Lib/test/test_urllib.py | 4 +++- Lib/urllib/request.py | 3 ++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py --- a/Lib/test/test_urllib.py +++ b/Lib/test/test_urllib.py @@ -135,7 +135,9 @@ proxies = urllib.request.getproxies_environment() # getproxies_environment use lowered case truncated (no '_proxy') keys self.assertEqual('localhost', proxies['no']) - + # List of no_proxies with space. 
+ self.env.set('NO_PROXY', 'localhost, anotherdomain.com, newdomain.com') + self.assertTrue(urllib.request.proxy_bypass_environment('anotherdomain.com')) class urlopen_HttpTests(unittest.TestCase): """Test urlopen() opening a fake http connection.""" diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py --- a/Lib/urllib/request.py +++ b/Lib/urllib/request.py @@ -2274,7 +2274,8 @@ # strip port off host hostonly, port = splitport(host) # check if the host ends with any of the DNS suffixes - for name in no_proxy.split(','): + no_proxy_list = [proxy.strip() for proxy in no_proxy.split(',')] + for name in no_proxy_list: if name and (hostonly.endswith(name) or host.endswith(name)): return 1 # otherwise, don't bypass -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 6 06:56:33 2011 From: python-checkins at python.org (senthil.kumaran) Date: Sat, 06 Aug 2011 06:56:33 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Fix_closes_issu?= =?utf8?q?e11047_-_Correct_the_2=2E7_whatsnew_description_for_issue_7902?= =?utf8?q?=2E?= Message-ID: http://hg.python.org/cpython/rev/298df0970eec changeset: 71753:298df0970eec branch: 2.7 parent: 71750:c5a35bcfa3ee user: Senthil Kumaran date: Sat Aug 06 12:52:56 2011 +0800 summary: Fix closes issue11047 - Correct the 2.7 whatsnew description for issue 7902. files: Doc/whatsnew/2.7.rst | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Doc/whatsnew/2.7.rst b/Doc/whatsnew/2.7.rst --- a/Doc/whatsnew/2.7.rst +++ b/Doc/whatsnew/2.7.rst @@ -783,8 +783,8 @@ (Contributed by Fredrik Johansson and Victor Stinner; :issue:`3439`.) -* The :keyword:`import` statement will no longer try a relative import - if an absolute import (e.g. ``from .os import sep``) fails. This +* The :keyword:`import` statement will no longer try a absolute import + if a relative import (e.g. ``from .os import sep``) fails. 
This fixes a bug, but could possibly break certain :keyword:`import` statements that were only working by accident. (Fixed by Meador Inge; :issue:`7902`.) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 6 06:56:34 2011 From: python-checkins at python.org (senthil.kumaran) Date: Sat, 06 Aug 2011 06:56:34 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_merge_from_2=2E?= =?utf8?q?7_-_Fix_closes_issue11047_-_Correct_the_2=2E7_whatsnew_descripti?= =?utf8?q?on?= Message-ID: http://hg.python.org/cpython/rev/dafdbd0d570a changeset: 71754:dafdbd0d570a branch: 3.2 parent: 71751:1d4bd059a9b6 user: Senthil Kumaran date: Sat Aug 06 12:54:23 2011 +0800 summary: merge from 2.7 - Fix closes issue11047 - Correct the 2.7 whatsnew description for issue 7902. files: Doc/whatsnew/2.7.rst | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Doc/whatsnew/2.7.rst b/Doc/whatsnew/2.7.rst --- a/Doc/whatsnew/2.7.rst +++ b/Doc/whatsnew/2.7.rst @@ -782,8 +782,8 @@ (Contributed by Fredrik Johansson and Victor Stinner; :issue:`3439`.) -* The :keyword:`import` statement will no longer try a relative import - if an absolute import (e.g. ``from .os import sep``) fails. This +* The :keyword:`import` statement will no longer try an absolute import + if a relative import (e.g. ``from .os import sep``) fails. This fixes a bug, but could possibly break certain :keyword:`import` statements that were only working by accident. (Fixed by Meador Inge; :issue:`7902`.) 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 6 06:56:35 2011 From: python-checkins at python.org (senthil.kumaran) Date: Sat, 06 Aug 2011 06:56:35 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_s/a/an_-_gramma?= =?utf8?q?r=3F?= Message-ID: http://hg.python.org/cpython/rev/13255f27a4bb changeset: 71756:13255f27a4bb branch: 2.7 parent: 71753:298df0970eec user: Senthil Kumaran date: Sat Aug 06 12:56:08 2011 +0800 summary: s/a/an - grammar? files: Doc/whatsnew/2.7.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/whatsnew/2.7.rst b/Doc/whatsnew/2.7.rst --- a/Doc/whatsnew/2.7.rst +++ b/Doc/whatsnew/2.7.rst @@ -783,7 +783,7 @@ (Contributed by Fredrik Johansson and Victor Stinner; :issue:`3439`.) -* The :keyword:`import` statement will no longer try a absolute import +* The :keyword:`import` statement will no longer try an absolute import if a relative import (e.g. ``from .os import sep``) fails. This fixes a bug, but could possibly break certain :keyword:`import` statements that were only working by accident. (Fixed by Meador Inge; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 6 06:56:35 2011 From: python-checkins at python.org (senthil.kumaran) Date: Sat, 06 Aug 2011 06:56:35 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_merge_from_3=2E2_-_Fix_closes_issue11047_-_Correct_the_2=2E7?= =?utf8?q?_whatsnew_description?= Message-ID: http://hg.python.org/cpython/rev/aea6f588230b changeset: 71755:aea6f588230b parent: 71752:47573019bfc8 parent: 71754:dafdbd0d570a user: Senthil Kumaran date: Sat Aug 06 12:55:06 2011 +0800 summary: merge from 3.2 - Fix closes issue11047 - Correct the 2.7 whatsnew description for issue 7902. 
files: Doc/whatsnew/2.7.rst | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Doc/whatsnew/2.7.rst b/Doc/whatsnew/2.7.rst --- a/Doc/whatsnew/2.7.rst +++ b/Doc/whatsnew/2.7.rst @@ -782,8 +782,8 @@ (Contributed by Fredrik Johansson and Victor Stinner; :issue:`3439`.) -* The :keyword:`import` statement will no longer try a relative import - if an absolute import (e.g. ``from .os import sep``) fails. This +* The :keyword:`import` statement will no longer try an absolute import + if a relative import (e.g. ``from .os import sep``) fails. This fixes a bug, but could possibly break certain :keyword:`import` statements that were only working by accident. (Fixed by Meador Inge; :issue:`7902`.) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 6 07:37:55 2011 From: python-checkins at python.org (senthil.kumaran) Date: Sat, 06 Aug 2011 07:37:55 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Fix_closes_Issu?= =?utf8?q?e12697_-_Update_the_usage_syntax_of_timeit_module_in_the_docs=2E?= Message-ID: http://hg.python.org/cpython/rev/fa1e5fe55664 changeset: 71757:fa1e5fe55664 branch: 2.7 user: Senthil Kumaran date: Sat Aug 06 13:34:30 2011 +0800 summary: Fix closes Issue12697 - Update the usage syntax of timeit module in the docs. files: Doc/library/timeit.rst | 12 ++++++------ 1 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Doc/library/timeit.rst b/Doc/library/timeit.rst --- a/Doc/library/timeit.rst +++ b/Doc/library/timeit.rst @@ -195,13 +195,13 @@ :keyword:`try`/:keyword:`except` to test for missing and present object attributes. 
:: - % timeit.py 'try:' ' str.__nonzero__' 'except AttributeError:' ' pass' + $ python -m timeit 'try:' ' str.__nonzero__' 'except AttributeError:' ' pass' 100000 loops, best of 3: 15.7 usec per loop - % timeit.py 'if hasattr(str, "__nonzero__"): pass' + $ python -m timeit 'if hasattr(str, "__nonzero__"): pass' 100000 loops, best of 3: 4.26 usec per loop - % timeit.py 'try:' ' int.__nonzero__' 'except AttributeError:' ' pass' + $ python -m timeit 'try:' ' int.__nonzero__' 'except AttributeError:' ' pass' 1000000 loops, best of 3: 1.43 usec per loop - % timeit.py 'if hasattr(int, "__nonzero__"): pass' + $ python -m timeit 'if hasattr(int, "__nonzero__"): pass' 100000 loops, best of 3: 2.23 usec per loop :: @@ -242,12 +242,12 @@ ``setup`` parameter which contains an import statement:: def test(): - "Stupid test function" + """Stupid test function""" L = [] for i in range(100): L.append(i) - if __name__=='__main__': + if __name__ == '__main__': from timeit import Timer t = Timer("test()", "from __main__ import test") print t.timeit() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 6 07:37:56 2011 From: python-checkins at python.org (senthil.kumaran) Date: Sat, 06 Aug 2011 07:37:56 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Fix_closes_Issu?= =?utf8?q?e12697_-_Update_the_usage_syntax_of_timeit_module_in_the_docs=2E?= Message-ID: http://hg.python.org/cpython/rev/87de58db3d40 changeset: 71758:87de58db3d40 branch: 3.2 parent: 71754:dafdbd0d570a user: Senthil Kumaran date: Sat Aug 06 13:37:04 2011 +0800 summary: Fix closes Issue12697 - Update the usage syntax of timeit module in the docs. files: Doc/library/timeit.rst | 12 ++++++------ 1 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Doc/library/timeit.rst b/Doc/library/timeit.rst --- a/Doc/library/timeit.rst +++ b/Doc/library/timeit.rst @@ -191,13 +191,13 @@ :keyword:`try`/:keyword:`except` to test for missing and present object attributes. 
:: - % timeit.py 'try:' ' str.__bool__' 'except AttributeError:' ' pass' + $ python -m timeit 'try:' ' str.__bool__' 'except AttributeError:' ' pass' 100000 loops, best of 3: 15.7 usec per loop - % timeit.py 'if hasattr(str, "__bool__"): pass' + $ python -m timeit 'if hasattr(str, "__bool__"): pass' 100000 loops, best of 3: 4.26 usec per loop - % timeit.py 'try:' ' int.__bool__' 'except AttributeError:' ' pass' + $ python -m timeit 'try:' ' int.__bool__' 'except AttributeError:' ' pass' 1000000 loops, best of 3: 1.43 usec per loop - % timeit.py 'if hasattr(int, "__bool__"): pass' + $ python -m timeit 'if hasattr(int, "__bool__"): pass' 100000 loops, best of 3: 2.23 usec per loop :: @@ -238,10 +238,10 @@ ``setup`` parameter which contains an import statement:: def test(): - "Stupid test function" + """Stupid test function""" L = [i for i in range(100)] - if __name__=='__main__': + if __name__ == '__main__': from timeit import Timer t = Timer("test()", "from __main__ import test") print(t.timeit()) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 6 07:38:00 2011 From: python-checkins at python.org (senthil.kumaran) Date: Sat, 06 Aug 2011 07:38:00 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_merge_from_3=2E2_-_Fix_closes_Issue12697_-_Update_the_usage_?= =?utf8?q?syntax_of_timeit?= Message-ID: http://hg.python.org/cpython/rev/46b801545126 changeset: 71759:46b801545126 parent: 71755:aea6f588230b parent: 71758:87de58db3d40 user: Senthil Kumaran date: Sat Aug 06 13:37:37 2011 +0800 summary: merge from 3.2 - Fix closes Issue12697 - Update the usage syntax of timeit module in the docs. 
files: Doc/library/timeit.rst | 12 ++++++------ 1 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Doc/library/timeit.rst b/Doc/library/timeit.rst --- a/Doc/library/timeit.rst +++ b/Doc/library/timeit.rst @@ -191,13 +191,13 @@ :keyword:`try`/:keyword:`except` to test for missing and present object attributes. :: - % timeit.py 'try:' ' str.__bool__' 'except AttributeError:' ' pass' + $ python -m timeit 'try:' ' str.__bool__' 'except AttributeError:' ' pass' 100000 loops, best of 3: 15.7 usec per loop - % timeit.py 'if hasattr(str, "__bool__"): pass' + $ python -m timeit 'if hasattr(str, "__bool__"): pass' 100000 loops, best of 3: 4.26 usec per loop - % timeit.py 'try:' ' int.__bool__' 'except AttributeError:' ' pass' + $ python -m timeit 'try:' ' int.__bool__' 'except AttributeError:' ' pass' 1000000 loops, best of 3: 1.43 usec per loop - % timeit.py 'if hasattr(int, "__bool__"): pass' + $ python -m timeit 'if hasattr(int, "__bool__"): pass' 100000 loops, best of 3: 2.23 usec per loop :: @@ -238,10 +238,10 @@ ``setup`` parameter which contains an import statement:: def test(): - "Stupid test function" + """Stupid test function""" L = [i for i in range(100)] - if __name__=='__main__': + if __name__ == '__main__': from timeit import Timer t = Timer("test()", "from __main__ import test") print(t.timeit()) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 6 08:33:08 2011 From: python-checkins at python.org (eli.bendersky) Date: Sat, 06 Aug 2011 08:33:08 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Fix_grammar?= Message-ID: http://hg.python.org/cpython/rev/532cad687332 changeset: 71760:532cad687332 branch: 2.7 parent: 71757:fa1e5fe55664 user: Eli Bendersky date: Sat Aug 06 09:29:12 2011 +0300 summary: Fix grammar files: Doc/library/string.rst | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Doc/library/string.rst b/Doc/library/string.rst --- a/Doc/library/string.rst +++ 
b/Doc/library/string.rst @@ -729,9 +729,9 @@ to parse template strings. To do this, you can override these class attributes: * *delimiter* -- This is the literal string describing a placeholder introducing - delimiter. The default value ``$``. Note that this should *not* be a regular - expression, as the implementation will call :meth:`re.escape` on this string as - needed. + delimiter. The default value is ``$``. Note that this should *not* be a + regular expression, as the implementation will call :meth:`re.escape` on this + string as needed. * *idpattern* -- This is the regular expression describing the pattern for non-braced placeholders (the braces will be added automatically as -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 6 08:36:47 2011 From: python-checkins at python.org (eli.bendersky) Date: Sat, 06 Aug 2011 08:36:47 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Fix_grammar?= Message-ID: http://hg.python.org/cpython/rev/6151d7cea7fe changeset: 71761:6151d7cea7fe branch: 3.2 parent: 71758:87de58db3d40 user: Eli Bendersky date: Sat Aug 06 09:31:09 2011 +0300 summary: Fix grammar files: Doc/library/string.rst | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Doc/library/string.rst b/Doc/library/string.rst --- a/Doc/library/string.rst +++ b/Doc/library/string.rst @@ -709,9 +709,9 @@ to parse template strings. To do this, you can override these class attributes: * *delimiter* -- This is the literal string describing a placeholder introducing - delimiter. The default value ``$``. Note that this should *not* be a regular - expression, as the implementation will call :meth:`re.escape` on this string as - needed. + delimiter. The default value is ``$``. Note that this should *not* be a + regular expression, as the implementation will call :meth:`re.escape` on this + string as needed. 
* *idpattern* -- This is the regular expression describing the pattern for non-braced placeholders (the braces will be added automatically as -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 6 08:36:47 2011 From: python-checkins at python.org (eli.bendersky) Date: Sat, 06 Aug 2011 08:36:47 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Fix_grammar?= Message-ID: http://hg.python.org/cpython/rev/12ad8c975766 changeset: 71762:12ad8c975766 parent: 71759:46b801545126 parent: 71761:6151d7cea7fe user: Eli Bendersky date: Sat Aug 06 09:32:11 2011 +0300 summary: Fix grammar files: Doc/library/string.rst | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Doc/library/string.rst b/Doc/library/string.rst --- a/Doc/library/string.rst +++ b/Doc/library/string.rst @@ -709,9 +709,9 @@ to parse template strings. To do this, you can override these class attributes: * *delimiter* -- This is the literal string describing a placeholder introducing - delimiter. The default value ``$``. Note that this should *not* be a regular - expression, as the implementation will call :meth:`re.escape` on this string as - needed. + delimiter. The default value is ``$``. Note that this should *not* be a + regular expression, as the implementation will call :meth:`re.escape` on this + string as needed. 
* *idpattern* -- This is the regular expression describing the pattern for non-braced placeholders (the braces will be added automatically as -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 6 15:05:14 2011 From: python-checkins at python.org (ezio.melotti) Date: Sat, 6 Aug 2011 15:05:14 +0200 (CEST) Subject: [Python-checkins] r88874 - tracker/instances/python-dev/rietveld/templates/issue_base.html Message-ID: <3RWQGL3DN9zMPG@mail.python.org> Author: ezio.melotti Date: Sat Aug 6 15:05:14 2011 New Revision: 88874 Log: Make the issue link in the review page link back to the roundup page. Modified: tracker/instances/python-dev/rietveld/templates/issue_base.html Modified: tracker/instances/python-dev/rietveld/templates/issue_base.html ============================================================================== --- tracker/instances/python-dev/rietveld/templates/issue_base.html (original) +++ tracker/instances/python-dev/rietveld/templates/issue_base.html Sat Aug 6 15:05:14 2011 @@ -38,9 +38,8 @@ height="15" border="0">
    {%endif%} - Issue - {{issue.key.id}}: + #{{issue.key.id}}: {{issue.subject}} {%if issue.closed %} (Closed) {%endif%} From python-checkins at python.org Sat Aug 6 15:07:51 2011 From: python-checkins at python.org (ezio.melotti) Date: Sat, 6 Aug 2011 15:07:51 +0200 (CEST) Subject: [Python-checkins] r88875 - tracker/instances/python-dev/rietveld/templates/issue_base.html Message-ID: <3RWQKM3WLlzMPR@mail.python.org> Author: ezio.melotti Date: Sat Aug 6 15:07:51 2011 New Revision: 88875 Log: Remove links to create new issues from the review page. Modified: tracker/instances/python-dev/rietveld/templates/issue_base.html Modified: tracker/instances/python-dev/rietveld/templates/issue_base.html ============================================================================== --- tracker/instances/python-dev/rietveld/templates/issue_base.html (original) +++ tracker/instances/python-dev/rietveld/templates/issue_base.html Sat Aug 6 15:07:51 2011 @@ -8,12 +8,6 @@ {%block mainmenu2%} {%if user%} - {%if uploadpy_hint%} - Create Issue - {%else%} - Create Issue - {%endif%} - | My Issues | Recent Issues From python-checkins at python.org Sun Aug 7 04:04:19 2011 From: python-checkins at python.org (senthil.kumaran) Date: Sun, 07 Aug 2011 04:04:19 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Remove_the_old_?= =?utf8?q?dead_test_program_within_the_module_in_2=2E7=2E?= Message-ID: http://hg.python.org/cpython/rev/f93ec4da0278 changeset: 71763:f93ec4da0278 branch: 2.7 parent: 71760:532cad687332 user: Senthil Kumaran date: Sun Aug 07 10:02:49 2011 +0800 summary: Remove the old dead test program within the module in 2.7. 
files: Lib/urllib.py | 62 --------------------------------------- 1 files changed, 0 insertions(+), 62 deletions(-) diff --git a/Lib/urllib.py b/Lib/urllib.py --- a/Lib/urllib.py +++ b/Lib/urllib.py @@ -1600,65 +1600,3 @@ print "Block number: %d, Block size: %d, Total size: %d" % ( blocknum, blocksize, totalsize) -# Test program -def test(args=[]): - if not args: - args = [ - '/etc/passwd', - 'file:/etc/passwd', - 'file://localhost/etc/passwd', - 'ftp://ftp.gnu.org/pub/README', - 'http://www.python.org/index.html', - ] - if hasattr(URLopener, "open_https"): - args.append('https://synergy.as.cmu.edu/~geek/') - try: - for url in args: - print '-'*10, url, '-'*10 - fn, h = urlretrieve(url, None, reporthook) - print fn - if h: - print '======' - for k in h.keys(): print k + ':', h[k] - print '======' - with open(fn, 'rb') as fp: - data = fp.read() - if '\r' in data: - table = string.maketrans("", "") - data = data.translate(table, "\r") - print data - fn, h = None, None - print '-'*40 - finally: - urlcleanup() - -def main(): - import getopt, sys - try: - opts, args = getopt.getopt(sys.argv[1:], "th") - except getopt.error, msg: - print msg - print "Use -h for help" - return - t = 0 - for o, a in opts: - if o == '-t': - t = t + 1 - if o == '-h': - print "Usage: python urllib.py [-t] [url ...]" - print "-t runs self-test;", - print "otherwise, contents of urls are printed" - return - if t: - if t > 1: - test1() - test(args) - else: - if not args: - print "Use -h for help" - for url in args: - print urlopen(url).read(), - -# Run test program when run as a script -if __name__ == '__main__': - main() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 7 04:04:20 2011 From: python-checkins at python.org (senthil.kumaran) Date: Sun, 07 Aug 2011 04:04:20 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_whitespace_fix?= =?utf8?q?=2E?= Message-ID: http://hg.python.org/cpython/rev/f26662c34707 changeset: 71764:f26662c34707 
branch: 2.7 user: Senthil Kumaran date: Sun Aug 07 10:03:58 2011 +0800 summary: whitespace fix. files: Lib/urllib.py | 1 - 1 files changed, 0 insertions(+), 1 deletions(-) diff --git a/Lib/urllib.py b/Lib/urllib.py --- a/Lib/urllib.py +++ b/Lib/urllib.py @@ -1599,4 +1599,3 @@ # Report during remote transfers print "Block number: %d, Block size: %d, Total size: %d" % ( blocknum, blocksize, totalsize) - -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Sun Aug 7 05:24:54 2011 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Sun, 07 Aug 2011 05:24:54 +0200 Subject: [Python-checkins] Daily reference leaks (12ad8c975766): sum=0 Message-ID: results for 12ad8c975766 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflog8mVH30', '-x'] From python-checkins at python.org Sun Aug 7 14:45:01 2011 From: python-checkins at python.org (ezio.melotti) Date: Sun, 7 Aug 2011 14:45:01 +0200 (CEST) Subject: [Python-checkins] r88876 - in tracker/instances/python-dev/html: _generic.index.html _generic.keywords_expr.html issue.search.html page.html style.css Message-ID: <3RX1mY4xm0zMV7@mail.python.org> Author: ezio.melotti Date: Sun Aug 7 14:45:01 2011 New Revision: 88876 Log: #411: Upgrade the python-dev instance to 1.4.19. Added: tracker/instances/python-dev/html/_generic.keywords_expr.html Modified: tracker/instances/python-dev/html/_generic.index.html tracker/instances/python-dev/html/issue.search.html tracker/instances/python-dev/html/page.html tracker/instances/python-dev/html/style.css Modified: tracker/instances/python-dev/html/_generic.index.html ============================================================================== --- tracker/instances/python-dev/html/_generic.index.html (original) +++ tracker/instances/python-dev/html/_generic.index.html Sun Aug 7 14:45:01 2011 @@ -37,7 +37,8 @@

    Remove entries by deleting their line. Add new entries by appending - them to the table - put an X in the id column. + them to the table - put an X in the id column. If you wish to restore a + removed item and you know its id then just put that id in the id column.

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Modified: tracker/instances/python-dev/html/issue.search.html ============================================================================== --- tracker/instances/python-dev/html/issue.search.html (original) +++ tracker/instances/python-dev/html/issue.search.html Sun Aug 7 14:45:01 2011 @@ -12,10 +12,10 @@ cols python:request.columns or 'id activity title status assignedto'.split(); sort_on python:request.sort and request.sort[0] or nothing; sort_desc python:sort_on and sort_on[0] == '-'; - sort_on python:(sort_on and sort_on[1]) or 'activity'; + sort_on python:(sort_on and sort_on[1]) or (not request.nodeid and 'activity') or ''; group_on python:request.group and request.group[0] or nothing; group_desc python:group_on and group_on[0] == '-'; - group_on python:(group_on and group_on[1]) or 'priority'; + group_on python:(group_on and group_on[1]) or (not request.nodeid and 'priority') or ''; search_input templates/page/macros/search_input; search_date templates/page/macros/search_date; @@ -23,6 +23,7 @@ sort_input templates/page/macros/sort_input; group_input templates/page/macros/group_input; search_select templates/page/macros/search_select; + search_select_keywords templates/page/macros/search_select_keywords; search_select_translated templates/page/macros/search_select_translated; search_multiselect templates/page/macros/search_multiselect;"> @@ -208,11 +209,11 @@ - Keyword: - + @@ -280,8 +281,8 @@ No Sort or group:     - - + + Modified: tracker/instances/python-dev/html/page.html ============================================================================== --- tracker/instances/python-dev/html/page.html (original) +++ tracker/instances/python-dev/html/page.html Sun Aug 7 14:45:01 2011 @@ -331,7 +331,7 @@ - @@ -347,6 +347,22 @@ + +
    + + (expr) +
    + + Modified: tracker/instances/python-dev/html/style.css ============================================================================== --- tracker/instances/python-dev/html/style.css (original) +++ tracker/instances/python-dev/html/style.css Sun Aug 7 14:45:01 2011 @@ -481,12 +481,12 @@ #demowarning { - position: absolute; - top: 10px; - left: 260px; - font-weight: bold; - font-size: 110%; - color: red; + position: absolute; + top: 10px; + left: 260px; + font-weight: bold; + font-size: 110%; + color: red; } @@ -499,5 +499,17 @@ } .closed { -text-decoration: line-through !important; + text-decoration: line-through !important; +} + +.calendar_display { + text-align: center; +} + +.calendar_display td { + padding: 1px 4px 1px 4px; +} + +.calendar_display .today { + background-color: #afafaf; } From python-checkins at python.org Sun Aug 7 16:11:41 2011 From: python-checkins at python.org (ezio.melotti) Date: Sun, 7 Aug 2011 16:11:41 +0200 (CEST) Subject: [Python-checkins] r88877 - in tracker: instances/python-dev/html/_generic.keywords_expr.html instances/python-dev/html/issue.search.html instances/python-dev/html/page.html roundup-src/roundup/cgi/KeywordsExpr.py Message-ID: <3RX3hY40fvzMYH@mail.python.org> Author: ezio.melotti Date: Sun Aug 7 16:11:41 2011 New Revision: 88877 Log: Fix the new keyword(s) search. 
Modified: tracker/instances/python-dev/html/_generic.keywords_expr.html tracker/instances/python-dev/html/issue.search.html tracker/instances/python-dev/html/page.html tracker/roundup-src/roundup/cgi/KeywordsExpr.py Modified: tracker/instances/python-dev/html/_generic.keywords_expr.html ============================================================================== --- tracker/instances/python-dev/html/_generic.keywords_expr.html (original) +++ tracker/instances/python-dev/html/_generic.keywords_expr.html Sun Aug 7 16:11:41 2011 @@ -9,36 +9,3 @@ tal:content="structure python:utils.keywords_expressions(request)"> - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Modified: tracker/instances/python-dev/html/issue.search.html ============================================================================== --- tracker/instances/python-dev/html/issue.search.html (original) +++ tracker/instances/python-dev/html/issue.search.html Sun Aug 7 16:11:41 2011 @@ -209,7 +209,7 @@ - Keyword: Modified: tracker/instances/python-dev/html/page.html ============================================================================== --- tracker/instances/python-dev/html/page.html (original) +++ tracker/instances/python-dev/html/page.html Sun Aug 7 16:11:41 2011 @@ -348,8 +348,8 @@ -
    - @@ -359,7 +359,7 @@ tal:content="python:s[db_content]"> (expr) + href="javascript:help_window('issue?@template=keywords_expr&property=keyword&form=itemSynopsis', 500, 200)">(expr)
    Modified: tracker/roundup-src/roundup/cgi/KeywordsExpr.py ============================================================================== --- tracker/roundup-src/roundup/cgi/KeywordsExpr.py (original) +++ tracker/roundup-src/roundup/cgi/KeywordsExpr.py Sun Aug 7 16:11:41 2011 @@ -119,7 +119,7 @@ } function render_select(handler) { - var out = ''; out += parse(current).infix(); return out; } From python-checkins at python.org Sun Aug 7 17:15:02 2011 From: python-checkins at python.org (sandro.tosi) Date: Sun, 07 Aug 2011 17:15:02 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogIzEyNjc3OiBjb3Jy?= =?utf8?q?ect_turtle_orientation_in_doc?= Message-ID: http://hg.python.org/cpython/rev/a30e6c4bf486 changeset: 71765:a30e6c4bf486 branch: 2.7 user: Sandro Tosi date: Sun Aug 07 17:09:15 2011 +0200 summary: #12677: correct turtle orientation in doc files: Demo/turtle/about_turtle.txt | 4 ++-- Doc/library/turtle.rst | 4 ++-- Lib/lib-tk/turtle.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Demo/turtle/about_turtle.txt b/Demo/turtle/about_turtle.txt --- a/Demo/turtle/about_turtle.txt +++ b/Demo/turtle/about_turtle.txt @@ -7,10 +7,10 @@ kids. It was part of the original Logo programming language developed by Wally Feurzig and Seymour Papert in 1966. -Imagine a robotic turtle starting at (0, 0) in the x-y plane. Give it +Imagine a robotic turtle starting at (0, 0) in the x-y plane. After an ``import turtle``, give it the command turtle.forward(15), and it moves (on-screen!) 15 pixels in the direction it is facing, drawing a line as it moves. Give it the -command turtle.left(25), and it rotates in-place 25 degrees clockwise. +command turtle.right(25), and it rotates in-place 25 degrees clockwise. By combining together these and similar commands, intricate shapes and pictures can easily be drawn. 
diff --git a/Doc/library/turtle.rst b/Doc/library/turtle.rst --- a/Doc/library/turtle.rst +++ b/Doc/library/turtle.rst @@ -18,10 +18,10 @@ part of the original Logo programming language developed by Wally Feurzig and Seymour Papert in 1966. -Imagine a robotic turtle starting at (0, 0) in the x-y plane. Give it the +Imagine a robotic turtle starting at (0, 0) in the x-y plane. After an ``import turtle``, give it the command ``turtle.forward(15)``, and it moves (on-screen!) 15 pixels in the direction it is facing, drawing a line as it moves. Give it the command -``turtle.left(25)``, and it rotates in-place 25 degrees clockwise. +``turtle.right(25)``, and it rotates in-place 25 degrees clockwise. By combining together these and similar commands, intricate shapes and pictures can easily be drawn. diff --git a/Lib/lib-tk/turtle.py b/Lib/lib-tk/turtle.py --- a/Lib/lib-tk/turtle.py +++ b/Lib/lib-tk/turtle.py @@ -27,10 +27,10 @@ kids. It was part of the original Logo programming language developed by Wally Feurzig and Seymour Papert in 1966. -Imagine a robotic turtle starting at (0, 0) in the x-y plane. Give it +Imagine a robotic turtle starting at (0, 0) in the x-y plane. After an ``import turtle``, give it the command turtle.forward(15), and it moves (on-screen!) 15 pixels in the direction it is facing, drawing a line as it moves. Give it the -command turtle.left(25), and it rotates in-place 25 degrees clockwise. +command turtle.right(25), and it rotates in-place 25 degrees clockwise. By combining together these and similar commands, intricate shapes and pictures can easily be drawn. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 7 17:15:03 2011 From: python-checkins at python.org (sandro.tosi) Date: Sun, 07 Aug 2011 17:15:03 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogIzEyNjc3OiBjb3Jy?= =?utf8?q?ect_turtle_orientation_in_doc?= Message-ID: http://hg.python.org/cpython/rev/bf4f65043d7d changeset: 71766:bf4f65043d7d branch: 3.2 parent: 71761:6151d7cea7fe user: Sandro Tosi date: Sun Aug 07 17:12:19 2011 +0200 summary: #12677: correct turtle orientation in doc files: Doc/library/turtle.rst | 4 ++-- Lib/turtle.py | 4 ++-- Lib/turtledemo/about_turtle.txt | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Doc/library/turtle.rst b/Doc/library/turtle.rst --- a/Doc/library/turtle.rst +++ b/Doc/library/turtle.rst @@ -18,10 +18,10 @@ part of the original Logo programming language developed by Wally Feurzig and Seymour Papert in 1966. -Imagine a robotic turtle starting at (0, 0) in the x-y plane. Give it the +Imagine a robotic turtle starting at (0, 0) in the x-y plane. After an ``import turtle``, give it the command ``turtle.forward(15)``, and it moves (on-screen!) 15 pixels in the direction it is facing, drawing a line as it moves. Give it the command -``turtle.left(25)``, and it rotates in-place 25 degrees clockwise. +``turtle.right(25)``, and it rotates in-place 25 degrees clockwise. .. sidebar:: Turtle star diff --git a/Lib/turtle.py b/Lib/turtle.py --- a/Lib/turtle.py +++ b/Lib/turtle.py @@ -27,10 +27,10 @@ kids. It was part of the original Logo programming language developed by Wally Feurzig and Seymour Papert in 1966. -Imagine a robotic turtle starting at (0, 0) in the x-y plane. Give it +Imagine a robotic turtle starting at (0, 0) in the x-y plane. After an ``import turtle``, give it the command turtle.forward(15), and it moves (on-screen!) 15 pixels in the direction it is facing, drawing a line as it moves. 
Give it the -command turtle.left(25), and it rotates in-place 25 degrees clockwise. +command turtle.right(25), and it rotates in-place 25 degrees clockwise. By combining together these and similar commands, intricate shapes and pictures can easily be drawn. diff --git a/Lib/turtledemo/about_turtle.txt b/Lib/turtledemo/about_turtle.txt --- a/Lib/turtledemo/about_turtle.txt +++ b/Lib/turtledemo/about_turtle.txt @@ -7,10 +7,10 @@ kids. It was part of the original Logo programming language developed by Wally Feurzig and Seymour Papert in 1966. -Imagine a robotic turtle starting at (0, 0) in the x-y plane. Give it +Imagine a robotic turtle starting at (0, 0) in the x-y plane. After an ``import turtle``, give it the command turtle.forward(15), and it moves (on-screen!) 15 pixels in the direction it is facing, drawing a line as it moves. Give it the -command turtle.left(25), and it rotates in-place 25 degrees clockwise. +command turtle.right(25), and it rotates in-place 25 degrees clockwise. By combining together these and similar commands, intricate shapes and pictures can easily be drawn. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 7 17:15:06 2011 From: python-checkins at python.org (sandro.tosi) Date: Sun, 07 Aug 2011 17:15:06 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_=2312677=3A_merge_with_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/a7365994f4d6 changeset: 71767:a7365994f4d6 parent: 71762:12ad8c975766 parent: 71766:bf4f65043d7d user: Sandro Tosi date: Sun Aug 07 17:13:13 2011 +0200 summary: #12677: merge with 3.2 files: Doc/library/turtle.rst | 4 ++-- Lib/turtle.py | 4 ++-- Lib/turtledemo/about_turtle.txt | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Doc/library/turtle.rst b/Doc/library/turtle.rst --- a/Doc/library/turtle.rst +++ b/Doc/library/turtle.rst @@ -18,10 +18,10 @@ part of the original Logo programming language developed by Wally Feurzig and Seymour Papert in 1966. -Imagine a robotic turtle starting at (0, 0) in the x-y plane. Give it the +Imagine a robotic turtle starting at (0, 0) in the x-y plane. After an ``import turtle``, give it the command ``turtle.forward(15)``, and it moves (on-screen!) 15 pixels in the direction it is facing, drawing a line as it moves. Give it the command -``turtle.left(25)``, and it rotates in-place 25 degrees clockwise. +``turtle.right(25)``, and it rotates in-place 25 degrees clockwise. .. sidebar:: Turtle star diff --git a/Lib/turtle.py b/Lib/turtle.py --- a/Lib/turtle.py +++ b/Lib/turtle.py @@ -27,10 +27,10 @@ kids. It was part of the original Logo programming language developed by Wally Feurzig and Seymour Papert in 1966. -Imagine a robotic turtle starting at (0, 0) in the x-y plane. Give it +Imagine a robotic turtle starting at (0, 0) in the x-y plane. After an ``import turtle``, give it the command turtle.forward(15), and it moves (on-screen!) 15 pixels in the direction it is facing, drawing a line as it moves. 
Give it the -command turtle.left(25), and it rotates in-place 25 degrees clockwise. +command turtle.right(25), and it rotates in-place 25 degrees clockwise. By combining together these and similar commands, intricate shapes and pictures can easily be drawn. diff --git a/Lib/turtledemo/about_turtle.txt b/Lib/turtledemo/about_turtle.txt --- a/Lib/turtledemo/about_turtle.txt +++ b/Lib/turtledemo/about_turtle.txt @@ -7,10 +7,10 @@ kids. It was part of the original Logo programming language developed by Wally Feurzig and Seymour Papert in 1966. -Imagine a robotic turtle starting at (0, 0) in the x-y plane. Give it +Imagine a robotic turtle starting at (0, 0) in the x-y plane. After an ``import turtle``, give it the command turtle.forward(15), and it moves (on-screen!) 15 pixels in the direction it is facing, drawing a line as it moves. Give it the -command turtle.left(25), and it rotates in-place 25 degrees clockwise. +command turtle.right(25), and it rotates in-place 25 degrees clockwise. By combining together these and similar commands, intricate shapes and pictures can easily be drawn. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 8 00:20:30 2011 From: python-checkins at python.org (sandro.tosi) Date: Mon, 08 Aug 2011 00:20:30 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogIzEwNzQxOiBhZGQg?= =?utf8?q?documentation_for_PyGILState=5FGetThisThreadState=28=29?= Message-ID: http://hg.python.org/cpython/rev/8852e4a9aa1e changeset: 71768:8852e4a9aa1e branch: 2.7 parent: 71765:a30e6c4bf486 user: Sandro Tosi date: Mon Aug 08 00:15:57 2011 +0200 summary: #10741: add documentation for PyGILState_GetThisThreadState() files: Doc/c-api/init.rst | 10 ++++++++++ Include/pystate.h | 2 +- 2 files changed, 11 insertions(+), 1 deletions(-) diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -638,6 +638,16 @@ .. versionadded:: 2.3 +.. 
c:function:: PyThreadState PyGILState_GetThisThreadState() + + Get the current thread state for this thread. May return ``NULL`` if no + GILState API has been used on the current thread. Note that the main thread + always has such a thread-state, even if no auto-thread-state call has been + made on the main thread. This is mainly a helper/diagnostic function. + + .. versionadded:: 2.3 + + The following macros are normally used without a trailing semicolon; look for example usage in the Python source distribution. diff --git a/Include/pystate.h b/Include/pystate.h --- a/Include/pystate.h +++ b/Include/pystate.h @@ -169,7 +169,7 @@ /* Helper/diagnostic function - get the current thread state for this thread. May return NULL if no GILState API has been used - on the current thread. Note the main thread always has such a + on the current thread. Note that the main thread always has such a thread-state, even if no auto-thread-state call has been made on the main thread. */ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 8 00:20:31 2011 From: python-checkins at python.org (sandro.tosi) Date: Mon, 08 Aug 2011 00:20:31 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogIzEwNzQxOiBhZGQg?= =?utf8?q?documentation_for_PyGILState=5FGetThisThreadState=28=29?= Message-ID: http://hg.python.org/cpython/rev/53b8260b9ea7 changeset: 71769:53b8260b9ea7 branch: 3.2 parent: 71766:bf4f65043d7d user: Sandro Tosi date: Mon Aug 08 00:16:54 2011 +0200 summary: #10741: add documentation for PyGILState_GetThisThreadState() files: Doc/c-api/init.rst | 8 ++++++++ Include/pystate.h | 2 +- 2 files changed, 9 insertions(+), 1 deletions(-) diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -646,6 +646,14 @@ :c:func:`PyGILState_Release` on the same thread. +.. c:function:: PyThreadState PyGILState_GetThisThreadState() + + Get the current thread state for this thread. 
May return ``NULL`` if no + GILState API has been used on the current thread. Note that the main thread + always has such a thread-state, even if no auto-thread-state call has been + made on the main thread. This is mainly a helper/diagnostic function. + + The following macros are normally used without a trailing semicolon; look for example usage in the Python source distribution. diff --git a/Include/pystate.h b/Include/pystate.h --- a/Include/pystate.h +++ b/Include/pystate.h @@ -195,7 +195,7 @@ /* Helper/diagnostic function - get the current thread state for this thread. May return NULL if no GILState API has been used - on the current thread. Note the main thread always has such a + on the current thread. Note that the main thread always has such a thread-state, even if no auto-thread-state call has been made on the main thread. */ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 8 00:20:33 2011 From: python-checkins at python.org (sandro.tosi) Date: Mon, 08 Aug 2011 00:20:33 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_=2310741=3A_merge_with_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/6adfeec8c482 changeset: 71770:6adfeec8c482 parent: 71767:a7365994f4d6 parent: 71769:53b8260b9ea7 user: Sandro Tosi date: Mon Aug 08 00:17:43 2011 +0200 summary: #10741: merge with 3.2 files: Doc/c-api/init.rst | 8 ++++++++ Include/pystate.h | 2 +- 2 files changed, 9 insertions(+), 1 deletions(-) diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -646,6 +646,14 @@ :c:func:`PyGILState_Release` on the same thread. +.. c:function:: PyThreadState PyGILState_GetThisThreadState() + + Get the current thread state for this thread. May return ``NULL`` if no + GILState API has been used on the current thread. Note that the main thread + always has such a thread-state, even if no auto-thread-state call has been + made on the main thread. 
This is mainly a helper/diagnostic function. + + The following macros are normally used without a trailing semicolon; look for example usage in the Python source distribution. diff --git a/Include/pystate.h b/Include/pystate.h --- a/Include/pystate.h +++ b/Include/pystate.h @@ -197,7 +197,7 @@ /* Helper/diagnostic function - get the current thread state for this thread. May return NULL if no GILState API has been used - on the current thread. Note the main thread always has such a + on the current thread. Note that the main thread always has such a thread-state, even if no auto-thread-state call has been made on the main thread. */ -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Mon Aug 8 05:26:52 2011 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Mon, 08 Aug 2011 05:26:52 +0200 Subject: [Python-checkins] Daily reference leaks (6adfeec8c482): sum=0 Message-ID: results for 6adfeec8c482 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogMxtvZs', '-x'] From python-checkins at python.org Mon Aug 8 16:40:46 2011 From: python-checkins at python.org (sandro.tosi) Date: Mon, 08 Aug 2011 16:40:46 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogIzEyNzA5OiBhZGQg?= =?utf8?q?error=5Fcallback_argument_to_map=5Fasync_documentation?= Message-ID: http://hg.python.org/cpython/rev/b2835b8412e5 changeset: 71771:b2835b8412e5 branch: 2.7 parent: 71768:8852e4a9aa1e user: Sandro Tosi date: Mon Aug 08 16:37:44 2011 +0200 summary: #12709: add error_callback argument to map_async documentation files: Doc/library/multiprocessing.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -1602,7 +1602,7 @@ the process pool as separate tasks. 
The (approximate) size of these chunks can be specified by setting *chunksize* to a positive integer. - .. method:: map_async(func, iterable[, chunksize[, callback]]) + .. method:: map_async(func, iterable[, chunksize[, callback[, error_callback]]]) A variant of the :meth:`.map` method which returns a result object. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 8 16:40:47 2011 From: python-checkins at python.org (sandro.tosi) Date: Mon, 08 Aug 2011 16:40:47 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogIzEyNzA5OiBhZGQg?= =?utf8?q?error=5Fcallback_argument_to_map=5Fasync_documentation?= Message-ID: http://hg.python.org/cpython/rev/41b816853819 changeset: 71772:41b816853819 branch: 3.2 parent: 71769:53b8260b9ea7 user: Sandro Tosi date: Mon Aug 08 16:38:13 2011 +0200 summary: #12709: add error_callback argument to map_async documentation files: Doc/library/multiprocessing.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -1609,7 +1609,7 @@ the process pool as separate tasks. The (approximate) size of these chunks can be specified by setting *chunksize* to a positive integer. - .. method:: map_async(func, iterable[, chunksize[, callback]]) + .. method:: map_async(func, iterable[, chunksize[, callback[, error_callback]]]) A variant of the :meth:`.map` method which returns a result object. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 8 16:40:48 2011 From: python-checkins at python.org (sandro.tosi) Date: Mon, 08 Aug 2011 16:40:48 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_=2312709=3A_merge_with_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/0c922e7d26e1 changeset: 71773:0c922e7d26e1 parent: 71770:6adfeec8c482 parent: 71772:41b816853819 user: Sandro Tosi date: Mon Aug 08 16:38:47 2011 +0200 summary: #12709: merge with 3.2 files: Doc/library/multiprocessing.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -1628,7 +1628,7 @@ the process pool as separate tasks. The (approximate) size of these chunks can be specified by setting *chunksize* to a positive integer. - .. method:: map_async(func, iterable[, chunksize[, callback]]) + .. method:: map_async(func, iterable[, chunksize[, callback[, error_callback]]]) A variant of the :meth:`.map` method which returns a result object. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 8 21:45:47 2011 From: python-checkins at python.org (georg.brandl) Date: Mon, 08 Aug 2011 21:45:47 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Confirm_that_th?= =?utf8?q?e_prime_example_is_actually_correct=2E__We_get_so_many_complaint?= =?utf8?q?s?= Message-ID: http://hg.python.org/cpython/rev/0e5433cfe404 changeset: 71774:0e5433cfe404 branch: 3.2 parent: 71772:41b816853819 user: Georg Brandl date: Mon Aug 08 21:45:13 2011 +0200 summary: Confirm that the prime example is actually correct. We get so many complaints about a "buggy example" on docs at python, let us hope this cuts them in half at least. 
files: Doc/tutorial/controlflow.rst | 3 +++ 1 files changed, 3 insertions(+), 0 deletions(-) diff --git a/Doc/tutorial/controlflow.rst b/Doc/tutorial/controlflow.rst --- a/Doc/tutorial/controlflow.rst +++ b/Doc/tutorial/controlflow.rst @@ -184,6 +184,9 @@ 8 equals 2 * 4 9 equals 3 * 3 +(Yes, this is the correct code. Look closely: the ``else`` clause belongs to +the :keyword:`for` loop, **not** the :keyword:`if` statement.) + .. _tut-pass: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 8 21:45:48 2011 From: python-checkins at python.org (georg.brandl) Date: Mon, 08 Aug 2011 21:45:48 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_with_3=2E2=2E?= Message-ID: http://hg.python.org/cpython/rev/89feee4fb8d6 changeset: 71775:89feee4fb8d6 parent: 71773:0c922e7d26e1 parent: 71774:0e5433cfe404 user: Georg Brandl date: Mon Aug 08 21:45:24 2011 +0200 summary: Merge with 3.2. files: Doc/tutorial/controlflow.rst | 3 +++ 1 files changed, 3 insertions(+), 0 deletions(-) diff --git a/Doc/tutorial/controlflow.rst b/Doc/tutorial/controlflow.rst --- a/Doc/tutorial/controlflow.rst +++ b/Doc/tutorial/controlflow.rst @@ -184,6 +184,9 @@ 8 equals 2 * 4 9 equals 3 * 3 +(Yes, this is the correct code. Look closely: the ``else`` clause belongs to +the :keyword:`for` loop, **not** the :keyword:`if` statement.) + .. 
_tut-pass: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 8 21:45:48 2011 From: python-checkins at python.org (georg.brandl) Date: Mon, 08 Aug 2011 21:45:48 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Confirm_that_th?= =?utf8?q?e_prime_example_is_actually_correct=2E__We_get_so_many_complaint?= =?utf8?q?s?= Message-ID: http://hg.python.org/cpython/rev/10f20ad2fbb6 changeset: 71776:10f20ad2fbb6 branch: 2.7 parent: 71771:b2835b8412e5 user: Georg Brandl date: Mon Aug 08 21:45:13 2011 +0200 summary: Confirm that the prime example is actually correct. We get so many complaints about a "buggy example" on docs at python, let us hope this cuts them in half at least. files: Doc/tutorial/controlflow.rst | 3 +++ 1 files changed, 3 insertions(+), 0 deletions(-) diff --git a/Doc/tutorial/controlflow.rst b/Doc/tutorial/controlflow.rst --- a/Doc/tutorial/controlflow.rst +++ b/Doc/tutorial/controlflow.rst @@ -156,6 +156,9 @@ 8 equals 2 * 4 9 equals 3 * 3 +(Yes, this is the correct code. Look closely: the ``else`` clause belongs to +the :keyword:`for` loop, **not** the :keyword:`if` statement.) + .. _tut-pass: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 8 22:44:31 2011 From: python-checkins at python.org (victor.stinner) Date: Mon, 08 Aug 2011 22:44:31 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2312700=3A_fix_test?= =?utf8?q?=5Ffaulthandler_for_Mac_OS_X_Lion?= Message-ID: http://hg.python.org/cpython/rev/1777df2ce50d changeset: 71777:1777df2ce50d parent: 71775:89feee4fb8d6 user: Victor Stinner date: Mon Aug 08 22:43:45 2011 +0200 summary: Issue #12700: fix test_faulthandler for Mac OS X Lion Read from NULL raises SIGILL, not SIGSEGV, on Mac OS X Lion. 
files: Lib/test/test_faulthandler.py | 9 +++++---- 1 files changed, 5 insertions(+), 4 deletions(-) diff --git a/Lib/test/test_faulthandler.py b/Lib/test/test_faulthandler.py --- a/Lib/test/test_faulthandler.py +++ b/Lib/test/test_faulthandler.py @@ -112,7 +112,8 @@ faulthandler._read_null() """.strip(), 3, - '(?:Segmentation fault|Bus error)') + # Issue #12700: Read NULL raises SIGILL on Mac OS X Lion + '(?:Segmentation fault|Bus error|Illegal instruction)') def test_sigsegv(self): self.check_fatal_error(""" @@ -192,7 +193,7 @@ faulthandler._read_null(True) """.strip(), 3, - '(?:Segmentation fault|Bus error)') + '(?:Segmentation fault|Bus error|Illegal instruction)') def test_enable_file(self): with temporary_filename() as filename: @@ -203,7 +204,7 @@ faulthandler._read_null() """.strip().format(filename=repr(filename)), 4, - '(?:Segmentation fault|Bus error)', + '(?:Segmentation fault|Bus error|Illegal instruction)', filename=filename) def test_enable_single_thread(self): @@ -213,7 +214,7 @@ faulthandler._read_null() """.strip(), 3, - '(?:Segmentation fault|Bus error)', + '(?:Segmentation fault|Bus error|Illegal instruction)', all_threads=False) def test_disable(self): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 01:11:25 2011 From: python-checkins at python.org (ezio.melotti) Date: Tue, 9 Aug 2011 01:11:25 +0200 (CEST) Subject: [Python-checkins] r88878 - in tracker/instances: board/html/_generic.help-list.html board/html/_generic.help.html board/html/issue.item.html board/html/user.index.html jobs/html/_generic.help-list.html jobs/html/_generic.help.html jobs/html/user.index.html jython/html/_generic.help-list.html jython/html/_generic.help.html jython/html/issue.item.html jython/html/user.index.html meta/html/_generic.help-list.html meta/html/_generic.help.html meta/html/issue.item.html meta/html/user.index.html python-dev-spambayes-integration/html/_generic.help-list.html 
python-dev-spambayes-integration/html/_generic.help.html python-dev-spambayes-integration/html/issue.item.html python-dev-spambayes-integration/html/user.index.html security/html/_generic.help-list.html security/html/_generic.help.html security/html/issue.item.html security/html/user.index.html setuptools/html/_generic.help-list.html setuptools/html/_generic.help.html setuptools/html/issue.item.html setuptools/html/user.index.html Message-ID: <3RXvcs6v5VzMTf@mail.python.org> Author: ezio.melotti Date: Tue Aug 9 01:11:25 2011 New Revision: 88878 Log: #278: update the other tracker instances to 1.4.8. Modified: tracker/instances/board/html/_generic.help-list.html tracker/instances/board/html/_generic.help.html tracker/instances/board/html/issue.item.html tracker/instances/board/html/user.index.html tracker/instances/jobs/html/_generic.help-list.html tracker/instances/jobs/html/_generic.help.html tracker/instances/jobs/html/user.index.html tracker/instances/jython/html/_generic.help-list.html tracker/instances/jython/html/_generic.help.html tracker/instances/jython/html/issue.item.html tracker/instances/jython/html/user.index.html tracker/instances/meta/html/_generic.help-list.html tracker/instances/meta/html/_generic.help.html tracker/instances/meta/html/issue.item.html tracker/instances/meta/html/user.index.html tracker/instances/python-dev-spambayes-integration/html/_generic.help-list.html tracker/instances/python-dev-spambayes-integration/html/_generic.help.html tracker/instances/python-dev-spambayes-integration/html/issue.item.html tracker/instances/python-dev-spambayes-integration/html/user.index.html tracker/instances/security/html/_generic.help-list.html tracker/instances/security/html/_generic.help.html tracker/instances/security/html/issue.item.html tracker/instances/security/html/user.index.html tracker/instances/setuptools/html/_generic.help-list.html tracker/instances/setuptools/html/_generic.help.html tracker/instances/setuptools/html/issue.item.html 
tracker/instances/setuptools/html/user.index.html Modified: tracker/instances/board/html/_generic.help-list.html ============================================================================== --- tracker/instances/board/html/_generic.help-list.html (original) +++ tracker/instances/board/html/_generic.help-list.html Tue Aug 9 01:11:25 2011 @@ -64,7 +64,7 @@ + tal:content="python:item[prop]"> Modified: tracker/instances/board/html/_generic.help.html ============================================================================== --- tracker/instances/board/html/_generic.help.html (original) +++ tracker/instances/board/html/_generic.help.html Tue Aug 9 01:11:25 2011 @@ -93,7 +93,7 @@ + tal:content="python:item[prop]"> @@ -149,7 +149,7 @@ + tal:content="python:item[prop]"> Modified: tracker/instances/board/html/issue.item.html ============================================================================== --- tracker/instances/board/html/issue.item.html (original) +++ tracker/instances/board/html/issue.item.html Tue Aug 9 01:11:25 2011 @@ -216,7 +216,7 @@ tal:attributes="href string:file${file/id}">edit - @@ -237,7 +237,7 @@ Date: - Modified: tracker/instances/board/html/user.index.html ============================================================================== --- tracker/instances/board/html/user.index.html (original) +++ tracker/instances/board/html/user.index.html Tue Aug 9 01:11:25 2011 @@ -62,9 +62,13 @@       - - retire + + + + + + Modified: tracker/instances/jobs/html/_generic.help-list.html ============================================================================== --- tracker/instances/jobs/html/_generic.help-list.html (original) +++ tracker/instances/jobs/html/_generic.help-list.html Tue Aug 9 01:11:25 2011 @@ -64,7 +64,7 @@ + tal:content="python:item[prop]"> Modified: tracker/instances/jobs/html/_generic.help.html ============================================================================== --- tracker/instances/jobs/html/_generic.help.html (original) 
+++ tracker/instances/jobs/html/_generic.help.html Tue Aug 9 01:11:25 2011 @@ -93,7 +93,7 @@ + tal:content="python:item[prop]"> @@ -149,7 +149,7 @@ + tal:content="python:item[prop]"> Modified: tracker/instances/jobs/html/user.index.html ============================================================================== --- tracker/instances/jobs/html/user.index.html (original) +++ tracker/instances/jobs/html/user.index.html Tue Aug 9 01:11:25 2011 @@ -62,9 +62,13 @@       - - retire + +
    + + + +
    Modified: tracker/instances/jython/html/_generic.help-list.html ============================================================================== --- tracker/instances/jython/html/_generic.help-list.html (original) +++ tracker/instances/jython/html/_generic.help-list.html Tue Aug 9 01:11:25 2011 @@ -64,7 +64,7 @@ + tal:content="python:item[prop]"> Modified: tracker/instances/jython/html/_generic.help.html ============================================================================== --- tracker/instances/jython/html/_generic.help.html (original) +++ tracker/instances/jython/html/_generic.help.html Tue Aug 9 01:11:25 2011 @@ -93,7 +93,7 @@ + tal:content="python:item[prop]"> @@ -149,7 +149,7 @@ + tal:content="python:item[prop]"> Modified: tracker/instances/jython/html/issue.item.html ============================================================================== --- tracker/instances/jython/html/issue.item.html (original) +++ tracker/instances/jython/html/issue.item.html Tue Aug 9 01:11:25 2011 @@ -222,7 +222,7 @@ tal:attributes="href string:file${file/id}">edit -
    @@ -243,7 +243,7 @@ Date: - Modified: tracker/instances/jython/html/user.index.html ============================================================================== --- tracker/instances/jython/html/user.index.html (original) +++ tracker/instances/jython/html/user.index.html Tue Aug 9 01:11:25 2011 @@ -62,9 +62,13 @@       - - retire + + + + + + Modified: tracker/instances/meta/html/_generic.help-list.html ============================================================================== --- tracker/instances/meta/html/_generic.help-list.html (original) +++ tracker/instances/meta/html/_generic.help-list.html Tue Aug 9 01:11:25 2011 @@ -64,7 +64,7 @@ + tal:content="python:item[prop]"> Modified: tracker/instances/meta/html/_generic.help.html ============================================================================== --- tracker/instances/meta/html/_generic.help.html (original) +++ tracker/instances/meta/html/_generic.help.html Tue Aug 9 01:11:25 2011 @@ -83,7 +83,7 @@ + tal:content="python:item[prop]"> Modified: tracker/instances/meta/html/issue.item.html ============================================================================== --- tracker/instances/meta/html/issue.item.html (original) +++ tracker/instances/meta/html/issue.item.html Tue Aug 9 01:11:25 2011 @@ -151,7 +151,7 @@ tal:attributes="href string:file${file/id}">edit -
    @@ -172,7 +172,7 @@ Date: - Modified: tracker/instances/meta/html/user.index.html ============================================================================== --- tracker/instances/meta/html/user.index.html (original) +++ tracker/instances/meta/html/user.index.html Tue Aug 9 01:11:25 2011 @@ -33,9 +33,13 @@       - - retire + + + + + + Modified: tracker/instances/python-dev-spambayes-integration/html/_generic.help-list.html ============================================================================== --- tracker/instances/python-dev-spambayes-integration/html/_generic.help-list.html (original) +++ tracker/instances/python-dev-spambayes-integration/html/_generic.help-list.html Tue Aug 9 01:11:25 2011 @@ -64,7 +64,7 @@ + tal:content="python:item[prop]"> Modified: tracker/instances/python-dev-spambayes-integration/html/_generic.help.html ============================================================================== --- tracker/instances/python-dev-spambayes-integration/html/_generic.help.html (original) +++ tracker/instances/python-dev-spambayes-integration/html/_generic.help.html Tue Aug 9 01:11:25 2011 @@ -93,7 +93,7 @@ + tal:content="python:item[prop]"> @@ -149,7 +149,7 @@ + tal:content="python:item[prop]"> Modified: tracker/instances/python-dev-spambayes-integration/html/issue.item.html ============================================================================== --- tracker/instances/python-dev-spambayes-integration/html/issue.item.html (original) +++ tracker/instances/python-dev-spambayes-integration/html/issue.item.html Tue Aug 9 01:11:25 2011 @@ -212,7 +212,7 @@ tal:attributes="href string:file${file/id}">edit -
    @@ -233,7 +233,7 @@ Date: - Modified: tracker/instances/python-dev-spambayes-integration/html/user.index.html ============================================================================== --- tracker/instances/python-dev-spambayes-integration/html/user.index.html (original) +++ tracker/instances/python-dev-spambayes-integration/html/user.index.html Tue Aug 9 01:11:25 2011 @@ -62,9 +62,13 @@       - - retire + + + + + + Modified: tracker/instances/security/html/_generic.help-list.html ============================================================================== --- tracker/instances/security/html/_generic.help-list.html (original) +++ tracker/instances/security/html/_generic.help-list.html Tue Aug 9 01:11:25 2011 @@ -64,7 +64,7 @@ + tal:content="python:item[prop]"> Modified: tracker/instances/security/html/_generic.help.html ============================================================================== --- tracker/instances/security/html/_generic.help.html (original) +++ tracker/instances/security/html/_generic.help.html Tue Aug 9 01:11:25 2011 @@ -93,7 +93,7 @@ + tal:content="python:item[prop]"> @@ -149,7 +149,7 @@ + tal:content="python:item[prop]"> Modified: tracker/instances/security/html/issue.item.html ============================================================================== --- tracker/instances/security/html/issue.item.html (original) +++ tracker/instances/security/html/issue.item.html Tue Aug 9 01:11:25 2011 @@ -222,7 +222,7 @@ tal:attributes="href string:file${file/id}">edit -
    @@ -243,7 +243,7 @@ Date: - Modified: tracker/instances/security/html/user.index.html ============================================================================== --- tracker/instances/security/html/user.index.html (original) +++ tracker/instances/security/html/user.index.html Tue Aug 9 01:11:25 2011 @@ -62,9 +62,13 @@       - - retire + + + + + + Modified: tracker/instances/setuptools/html/_generic.help-list.html ============================================================================== --- tracker/instances/setuptools/html/_generic.help-list.html (original) +++ tracker/instances/setuptools/html/_generic.help-list.html Tue Aug 9 01:11:25 2011 @@ -64,7 +64,7 @@ + tal:content="python:item[prop]"> Modified: tracker/instances/setuptools/html/_generic.help.html ============================================================================== --- tracker/instances/setuptools/html/_generic.help.html (original) +++ tracker/instances/setuptools/html/_generic.help.html Tue Aug 9 01:11:25 2011 @@ -83,7 +83,7 @@ + tal:content="python:item[prop]"> Modified: tracker/instances/setuptools/html/issue.item.html ============================================================================== --- tracker/instances/setuptools/html/issue.item.html (original) +++ tracker/instances/setuptools/html/issue.item.html Tue Aug 9 01:11:25 2011 @@ -151,7 +151,7 @@ tal:attributes="href string:file${file/id}">edit -
    @@ -172,7 +172,7 @@ Date: - Modified: tracker/instances/setuptools/html/user.index.html ============================================================================== --- tracker/instances/setuptools/html/user.index.html (original) +++ tracker/instances/setuptools/html/user.index.html Tue Aug 9 01:11:25 2011 @@ -33,9 +33,13 @@       - - retire + + + + + + From python-checkins at python.org Tue Aug 9 01:49:29 2011 From: python-checkins at python.org (mark.hammond) Date: Tue, 09 Aug 2011 01:49:29 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_fix_typo?= Message-ID: http://hg.python.org/peps/rev/511687b4bc6f changeset: 3918:511687b4bc6f user: Mark Hammond date: Tue Aug 09 09:48:49 2011 +1000 summary: fix typo files: pep-0397.txt | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/pep-0397.txt b/pep-0397.txt --- a/pep-0397.txt +++ b/pep-0397.txt @@ -316,7 +316,7 @@ a 32bit launcher would be unable to load the 64bit version of Python and vice-versa. - Given these considerations, the launcher will execute it's command in a + Given these considerations, the launcher will execute its command in a child process, remaining alive while the child process is executing, then terminate with the same exit code as returned by the child. 
To address concerns regarding the termination of the launcher not killing the child, -- Repository URL: http://hg.python.org/peps From solipsis at pitrou.net Tue Aug 9 05:25:51 2011 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Tue, 09 Aug 2011 05:25:51 +0200 Subject: [Python-checkins] Daily reference leaks (1777df2ce50d): sum=0 Message-ID: results for 1777df2ce50d on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogKcp4us', '-x'] From python-checkins at python.org Tue Aug 9 05:36:42 2011 From: python-checkins at python.org (ezio.melotti) Date: Tue, 9 Aug 2011 05:36:42 +0200 (CEST) Subject: [Python-checkins] r88879 - tracker/instances/python-dev/html/issue.item.html Message-ID: <3RY1Vy1cZhzMMY@mail.python.org> Author: ezio.melotti Date: Tue Aug 9 05:36:42 2011 New Revision: 88879 Log: #413: Fix the remove button for hgrepos. Modified: tracker/instances/python-dev/html/issue.item.html Modified: tracker/instances/python-dev/html/issue.item.html ============================================================================== --- tracker/instances/python-dev/html/issue.item.html (original) +++ tracker/instances/python-dev/html/issue.item.html Tue Aug 9 05:36:42 2011 @@ -281,7 +281,7 @@
    - +
    From python-checkins at python.org Tue Aug 9 13:37:36 2011 From: python-checkins at python.org (nick.coghlan) Date: Tue, 09 Aug 2011 13:37:36 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Fix_error_in_examples_pointed_?= =?utf8?q?out_by_Nicco_Kunzmann_and_mention_Haskell?= Message-ID: http://hg.python.org/peps/rev/e05c32e3cd1e changeset: 3919:e05c32e3cd1e user: Nick Coghlan date: Tue Aug 09 21:37:23 2011 +1000 summary: Fix error in examples pointed out by Nicco Kunzmann and mention Haskell influence files: pep-3150.txt | 13 +++++++------ 1 files changed, 7 insertions(+), 6 deletions(-) diff --git a/pep-3150.txt b/pep-3150.txt --- a/pep-3150.txt +++ b/pep-3150.txt @@ -36,9 +36,10 @@ argument hack"). The specific proposal in this PEP has been informed by various explorations -of this and related concepts over the years (e.g. [1], [2], [3], [6]). It avoids -some pitfalls that have been encountered in the past, but has not yet itself -been subject to the test of implementation. +of this and related concepts over the years (e.g. [1], [2], [3], [6]), and is +inspired to some degree by the ``where`` and ``let`` clauses in Haskell. It +avoids some problems that have been identified in past proposals, but has not +yet itself been subject to the test of implementation. 
PEP Deferral @@ -363,7 +364,7 @@ def f(): return i seq.append(f) - assert seq == [9]*10 + assert [f() for f in seq] == [9]*10 # Current Python (early binding via default argument hack) seq = [] @@ -371,7 +372,7 @@ def f(_i=i): return i seq.append(f) - assert seq == list(range(10)) + assert [f() for f in seq] == list(range(10)) # Early binding via given clause seq = [] @@ -379,7 +380,7 @@ seq.append(f) given: def f(): return i - assert seq == list(range(10)) + assert [f() for f in seq] == list(range(10)) Note that the current intention is for the copy-in/copy-out semantics to apply only to names defined in the local scope containing the ``given`` -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Tue Aug 9 18:11:13 2011 From: python-checkins at python.org (eric.araujo) Date: Tue, 09 Aug 2011 18:11:13 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Improve_documen?= =?utf8?q?tation_for_PEP_370_support_in_site_module_=28=238617=29=2E?= Message-ID: http://hg.python.org/cpython/rev/74179a79d673 changeset: 71778:74179a79d673 branch: 3.2 parent: 71746:cc86f4ca5020 user: ?ric Araujo date: Sat Aug 06 01:51:07 2011 +0200 summary: Improve documentation for PEP 370 support in site module (#8617). site.USER_BASE and site.USER_SITE are now fully documented. PEP 370 is outdated with respects to the Mac framework situation, but the code in sysconfig and the example in the 3.2 What?s New document helped me find the right values to document for Mac OS X. The command-line interface of the site module, partly documented in the 3.2 What?s New, is fully described in the module docs. The purpose of the usercustomize module is explained in the site docs, with a gentle introduction in the tutorial (right after the section that talks about PYTHONSTARTUP; a comment mentions it should be moved from the tutorial to another file, but that will be another bug). Various markup and wording improvements were made along the way in the site module docs. 
Duplicate and incomplete declarations of environment variables have also been removed (the original bug report was actually about these entries :). The site module docs are still a bit messy; I?ll see about improving them for #11553. All these sections are copiously interlinked and findable from the doc indexes. files: Doc/library/site.rst | 124 ++++++++++++++++------ Doc/tutorial/interactive.rst | 7 +- Doc/tutorial/interpreter.rst | 24 ++++- Doc/using/cmdline.rst | 10 +- 4 files changed, 121 insertions(+), 44 deletions(-) diff --git a/Doc/library/site.rst b/Doc/library/site.rst --- a/Doc/library/site.rst +++ b/Doc/library/site.rst @@ -2,18 +2,21 @@ ================================================ .. module:: site - :synopsis: A standard way to reference site-specific modules. + :synopsis: Module responsible for site-specific configuration. **Source code:** :source:`Lib/site.py` -------------- +.. highlightlang:: none + **This module is automatically imported during initialization.** The automatic import can be suppressed using the interpreter's :option:`-S` option. .. index:: triple: module; search; path -Importing this module will append site-specific paths to the module search path. +Importing this module will append site-specific paths to the module search path +and add a few builtins. .. index:: pair: site-python; directory @@ -28,11 +31,11 @@ if it refers to an existing directory, and if so, adds it to ``sys.path`` and also inspects the newly added path for configuration files. -A path configuration file is a file whose name has the form :file:`package.pth` +A path configuration file is a file whose name has the form :file:`{name}.pth` and exists in one of the four directories mentioned above; its contents are additional items (one per line) to be added to ``sys.path``. Non-existing items -are never added to ``sys.path``, but no check is made that the item refers to a -directory (rather than a file). 
No item is added to ``sys.path`` more than +are never added to ``sys.path``, and no check is made that the item refers to a +directory rather than a file. No item is added to ``sys.path`` more than once. Blank lines and lines beginning with ``#`` are skipped. Lines starting with ``import`` (followed by space or tab) are executed. @@ -42,8 +45,7 @@ For example, suppose ``sys.prefix`` and ``sys.exec_prefix`` are set to :file:`/usr/local`. The Python X.Y library is then installed in -:file:`/usr/local/lib/python{X.Y}` (where only the first three characters of -``sys.version`` are used to form the installation path name). Suppose this has +:file:`/usr/local/lib/python{X.Y}`. Suppose this has a subdirectory :file:`/usr/local/lib/python{X.Y}/site-packages` with three subsubdirectories, :file:`foo`, :file:`bar` and :file:`spam`, and two path configuration files, :file:`foo.pth` and :file:`bar.pth`. Assume @@ -76,74 +78,122 @@ After these path manipulations, an attempt is made to import a module named :mod:`sitecustomize`, which can perform arbitrary site-specific customizations. -If this import fails with an :exc:`ImportError` exception, it is silently -ignored. +It is typically created by a system administrator in the site-packages +directory. If this import fails with an :exc:`ImportError` exception, it is +silently ignored. -.. index:: module: sitecustomize +.. index:: module: usercustomize + +After this, an attempt is made to import a module named :mod:`usercustomize`, +which can perform arbitrary user-specific customizations, if +:data:`ENABLE_USER_SITE` is true. This file is intended to be created in the +user site-packages directory (see below), which is part of ``sys.path`` unless +disabled by :option:`-s`. An :exc:`ImportError` will be silently ignored. Note that for some non-Unix systems, ``sys.prefix`` and ``sys.exec_prefix`` are empty, and the path manipulations are skipped; however the import of -:mod:`sitecustomize` is still attempted. 
+:mod:`sitecustomize` and :mod:`usercustomize` is still attempted. .. data:: PREFIXES - A list of prefixes for site package directories + A list of prefixes for site-packages directories. .. data:: ENABLE_USER_SITE - Flag showing the status of the user site directory. True means the - user site directory is enabled and added to sys.path. When the flag - is None the user site directory is disabled for security reasons. + Flag showing the status of the user site-packages directory. ``True`` means + that it is enabled and was added to ``sys.path``. ``False`` means that it + was disabled by user request (with :option:`-s` or + :envvar:`PYTHONNOUSERSITE`). ``None`` means it was disabled for security + reasons (mismatch between user or group id and effective id) or by an + administrator. .. data:: USER_SITE - Path to the user site directory for the current Python version or None + Path to the user site-packages for the running Python. Can be ``None`` if + :func:`getusersitepackages` hasn't been called yet. Default value is + :file:`~/.local/lib/python{X.Y}/site-packages` for UNIX and non-framework Mac + OS X builds, :file:`~/Library/Python/{X.Y}/lib/python/site-packages` for Mac + framework builds, and :file:`{%APPDATA%}\\Python\\Python{XY}\\site-packages` + on Windows. This directory is a site directory, which means that + :file:`.pth` files in it will be processed. .. data:: USER_BASE - Path to the base directory for user site directories - - -.. envvar:: PYTHONNOUSERSITE - - -.. envvar:: PYTHONUSERBASE + Path to the base directory for the user site-packages. Can be ``None`` if + :func:`getuserbase` hasn't been called yet. Default value is + :file:`~/.local` for UNIX and Mac OS X non-framework builds, + :file:`~/Library/Python/{X.Y}` for Mac framework builds, and + :file:`{%APPDATA%}\\Python` for Windows. This value is used by Distutils to + compute the installation directories for scripts, data files, Python modules, + etc. See also :envvar:`PYTHONUSERBASE`. .. 
function:: addsitedir(sitedir, known_paths=None) - Adds a directory to sys.path and processes its pth files. + Add a directory to sys.path and process its :file:`.pth` files. + .. function:: getsitepackages() - Returns a list containing all global site-packages directories - (and possibly site-python). + Return a list containing all global site-packages directories (and possibly + site-python). .. versionadded:: 3.2 + .. function:: getuserbase() - Returns the "user base" directory path. - - The "user base" directory can be used to store data. If the global - variable ``USER_BASE`` is not initialized yet, this function will also set - it. + Return the path of the user base directory, :data:`USER_BASE`. If it is not + initialized yet, this function will also set it, respecting + :envvar:`PYTHONUSERBASE`. .. versionadded:: 3.2 + .. function:: getusersitepackages() - Returns the user-specific site-packages directory path. - - If the global variable ``USER_SITE`` is not initialized yet, this - function will also set it. + Return the path of the user-specific site-packages directory, + :data:`USER_SITE`. If it is not initialized yet, this function will also set + it, respecting :envvar:`PYTHONNOUSERSITE` and :data:`USER_BASE`. .. versionadded:: 3.2 -.. XXX Update documentation -.. XXX document python -m site --user-base --user-site +The :mod:`site` module also provides a way to get the user directories from the +command line: + +.. code-block:: sh + + $ python3 -m site --user-site + /home/user/.local/lib/python3.3/site-packages + +.. program:: site + +If it is called without arguments, it will print the contents of +:data:`sys.path` on the standard output, followed by the value of +:data:`USER_BASE` and whether the directory exists, then the same thing for +:data:`USER_SITE`, and finally the value of :data:`ENABLE_USER_SITE`. + +.. cmdoption:: --user-base + + Print the path to the user base directory. + +.. 
cmdoption:: --user-site + + Print the path to the user site-packages directory. + +If both options are given, user base and user site will be printed (always in +this order), separated by :data:`os.pathsep`. + +If any option is given, the script will exit with one of these values: ``O`` if +the user site-packages directory is enabled, ``1`` if it was disabled by the +user, ``2`` if it is disabled for security reasons or by an administrator, and a +value greater than 2 if there is an error. + +.. seealso:: + + :pep:`370` -- Per user site-packages directory diff --git a/Doc/tutorial/interactive.rst b/Doc/tutorial/interactive.rst --- a/Doc/tutorial/interactive.rst +++ b/Doc/tutorial/interactive.rst @@ -156,17 +156,18 @@ quotes, etc., would also be useful. One alternative enhanced interactive interpreter that has been around for quite -some time is `IPython`_, which features tab completion, object exploration and +some time is IPython_, which features tab completion, object exploration and advanced history management. It can also be thoroughly customized and embedded into other applications. Another similar enhanced interactive environment is -`bpython`_. +bpython_. .. rubric:: Footnotes .. [#] Python will execute the contents of a file identified by the :envvar:`PYTHONSTARTUP` environment variable when you start an interactive - interpreter. + interpreter. To customize Python even for non-interactive mode, see + :ref:`tut-customize`. .. _GNU Readline: http://tiswww.case.edu/php/chet/readline/rltop.html diff --git a/Doc/tutorial/interpreter.rst b/Doc/tutorial/interpreter.rst --- a/Doc/tutorial/interpreter.rst +++ b/Doc/tutorial/interpreter.rst @@ -236,6 +236,29 @@ exec(open(filename).read()) +.. _tut-customize: + +The Customization Modules +------------------------- + +Python provides two hooks to let you customize it: :mod:`sitecustomize` and +:mod:`usercustomize`. To see how it works, you need first to find the location +of your user site-packages directory. 
Start Python and run this code: + + >>> import site + >>> site.getusersitepackages() + '/home/user/.local/lib/python3.2/site-packages' + +Now you can create a file named :file:`usercustomize.py` in that directory and +put anything you want in it. It will affect every invocation of Python, unless +it is started with the :option:`-s` option to disable the automatic import. + +:mod:`sitecustomize` works in the same way, but is typically created by an +administrator of the computer in the global site-packages directory, and is +imported before :mod:`usercustomize`. See the documentation of the :mod:`site` +module for more details. + + .. rubric:: Footnotes .. [#] On Unix, the Python 3.x interpreter is by default not installed with the @@ -243,4 +266,3 @@ simultaneously installed Python 2.x executable. .. [#] A problem with the GNU Readline package may prevent this. - diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -229,7 +229,8 @@ .. cmdoption:: -s - Don't add user site directory to sys.path + Don't add the :data:`user site-packages directory ` to + :data:`sys.path`. .. seealso:: @@ -468,7 +469,8 @@ .. envvar:: PYTHONNOUSERSITE - If this is set, Python won't add the user site directory to sys.path + If this is set, Python won't add the :data:`user site-packages directory + ` to :data:`sys.path`. .. seealso:: @@ -477,7 +479,9 @@ .. envvar:: PYTHONUSERBASE - Sets the base directory for the user site directory + Defines the :data:`user base directory `, which is used to + compute the path of the :data:`user site-packages directory ` + and Distutils installation paths for ``python setup.py install --user``. .. 
seealso:: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 18:11:14 2011 From: python-checkins at python.org (eric.araujo) Date: Tue, 09 Aug 2011 18:11:14 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Add_documentati?= =?utf8?q?on_for_PEP_370_features_in_distutils_=28=2310745=29=2E?= Message-ID: http://hg.python.org/cpython/rev/1b0b5f644090 changeset: 71779:1b0b5f644090 branch: 3.2 user: ?ric Araujo date: Sat Aug 06 16:30:42 2011 +0200 summary: Add documentation for PEP 370 features in distutils (#10745). This started out as an easy task, just add a section describing this alternate scheme, but I found a lot of cleanup to do along the way: - fixed inverted reST targets - fixed entries for modules (hi abiflags!) or data files - avoided duplicating the same options listing five or six times - added missing entries for C headers locations - added documentation for --install-lib - fixed a few misuses of the option role (see #9312), but not all (not worth the time, but will do it in packaging docs) - fixed some markup The paths fixes were done with an eye on the source code in the install command, so they really describe what?s actually done. The situation on Mac OS X is rather messy: the fix for #8084 touched site and sysconfig, but distutils doesn?t use these files. I suspect we have a mismatched stdlib at the moment, and the fix is not even clear (see the bug report for further discussion). 
files: Doc/install/index.rst | 184 +++++++++++++++++++++-------- Doc/library/site.rst | 6 +- Doc/using/cmdline.rst | 3 +- 3 files changed, 137 insertions(+), 56 deletions(-) diff --git a/Doc/install/index.rst b/Doc/install/index.rst --- a/Doc/install/index.rst +++ b/Doc/install/index.rst @@ -279,6 +279,14 @@ >>> sys.exec_prefix '/usr' +A few other placeholders are used in this document: :file:`{X.Y}` stands for the +version of Python, for example ``3.2``; :file:`{abiflags}` will be replaced by +the value of :data:`sys.abiflags` or the empty string for platforms which don't +define ABI flags; :file:`{distname}` will be replaced by the name of the module +distribution being installed. Dots and capitalization are important in the +paths; for example, a value that uses ``python3.2`` on UNIX will typically use +``Python32`` on Windows. + If you don't want to install modules to the standard location, or if you don't have permission to write there, then you need to read about alternate installations in section :ref:`inst-alt-install`. If you want to customize your @@ -307,8 +315,61 @@ differ across platforms, so read whichever of the following sections applies to you. +Note that the various alternate installation schemes are mutually exclusive: you +can pass ``--user``, or ``--home``, or ``--prefix`` and ``--exec-prefix``, or +``--install-base`` and ``--install-platbase``, but you can't mix from these +groups. -.. _inst-alt-install-prefix: + +.. _inst-alt-install-user: + +Alternate installation: the user scheme +--------------------------------------- + +This scheme is designed to be the most convenient solution for users that don't +have write permission to the global site-packages directory or don't want to +install into it. It is enabled with a simple option:: + + python setup.py install --user + +Files will be installed into subdirectories of :data:`site.USER_BASE` (written +as :file:`{userbase}` hereafter). 
This scheme installs pure Python modules and +extension modules in the same location (also known as :data:`site.USER_SITE`). +Here are the values for UNIX, including Mac OS X: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}/lib/python{X.Y}/site-packages` +scripts :file:`{userbase}/bin` +data :file:`{userbase}` +C headers :file:`{userbase}/include/python{X.Y}{abiflags}/{distname}` +=============== =========================================================== + +And here are the values used on Windows: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}\\Python{XY}\\site-packages` +scripts :file:`{userbase}\\Scripts` +data :file:`{userbase}` +C headers :file:`{userbase}\\Python{XY}\\Include\\{distname}` +=============== =========================================================== + +The advantage of using this scheme compared to the other ones described below is +that the user site-packages directory is under normal conditions always included +in :data:`sys.path` (see :mod:`site` for more information), which means that +there is no additional step to perform after running the :file:`setup.py` script +to finalize the installation. + +The :command:`build_ext` command also has a ``--user`` option to add +:file:`{userbase}/include` to the compiler search path for header files and +:file:`{userbase}/lib` to the compiler search path for libraries as well as to +the runtime search path for shared C libraries (rpath). + + +.. 
_inst-alt-install-home: Alternate installation: the home scheme --------------------------------------- @@ -330,23 +391,27 @@ python setup.py install --home=~ +To make Python find the distributions installed with this scheme, you may have +to :ref:`modify Python's search path ` or edit +:mod:`sitecustomize` (see :mod:`site`) to call :func:`site.addsitedir` or edit +:data:`sys.path`. + The :option:`--home` option defines the installation base directory. Files are installed to the following directories under the installation base as follows: -+------------------------------+---------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+===========================+=============================+ -| pure module distribution | :file:`{home}/lib/python` | :option:`--install-purelib` | -+------------------------------+---------------------------+-----------------------------+ -| non-pure module distribution | :file:`{home}/lib/python` | :option:`--install-platlib` | -+------------------------------+---------------------------+-----------------------------+ -| scripts | :file:`{home}/bin` | :option:`--install-scripts` | -+------------------------------+---------------------------+-----------------------------+ -| data | :file:`{home}/share` | :option:`--install-data` | -+------------------------------+---------------------------+-----------------------------+ +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{home}/lib/python` +scripts :file:`{home}/bin` +data :file:`{home}` +C headers :file:`{home}/include/python/{distname}` +=============== =========================================================== +(Mentally replace slashes with backslashes if you're on Windows.) -.. _inst-alt-install-home: + +.. 
_inst-alt-install-prefix-unix: Alternate installation: Unix (the prefix scheme) ------------------------------------------------ @@ -355,7 +420,7 @@ perform the build/install (i.e., to run the setup script), but install modules into the third-party module directory of a different Python installation (or something that looks like a different Python installation). If this sounds a -trifle unusual, it is---that's why the "home scheme" comes first. However, +trifle unusual, it is---that's why the user and home schemes come before. However, there are at least two known cases where the prefix scheme will be useful. First, consider that many Linux distributions put Python in :file:`/usr`, rather @@ -383,17 +448,15 @@ executables, etc.) If :option:`--exec-prefix` is not supplied, it defaults to :option:`--prefix`. Files are installed as follows: -+------------------------------+-----------------------------------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+=====================================================+=============================+ -| pure module distribution | :file:`{prefix}/lib/python{X.Y}/site-packages` | :option:`--install-purelib` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| non-pure module distribution | :file:`{exec-prefix}/lib/python{X.Y}/site-packages` | :option:`--install-platlib` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| scripts | :file:`{prefix}/bin` | :option:`--install-scripts` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| data | :file:`{prefix}/share` | :option:`--install-data` | -+------------------------------+-----------------------------------------------------+-----------------------------+ +================= 
========================================================== +Type of file Installation directory +================= ========================================================== +Python modules :file:`{prefix}/lib/python{X.Y}/site-packages` +extension modules :file:`{exec-prefix}/lib/python{X.Y}/site-packages` +scripts :file:`{prefix}/bin` +data :file:`{prefix}` +C headers :file:`{prefix}/include/python{X.Y}{abiflags}/{distname}` +================= ========================================================== There is no requirement that :option:`--prefix` or :option:`--exec-prefix` actually point to an alternate Python installation; if the directories listed @@ -418,7 +481,7 @@ alternate Python installation, this is immaterial.) -.. _inst-alt-install-windows: +.. _inst-alt-install-prefix-windows: Alternate installation: Windows (the prefix scheme) --------------------------------------------------- @@ -433,20 +496,18 @@ to install modules to the :file:`\\Temp\\Python` directory on the current drive. The installation base is defined by the :option:`--prefix` option; the -:option:`--exec-prefix` option is not supported under Windows. Files are -installed as follows: +:option:`--exec-prefix` option is not supported under Windows, which means that +pure Python modules and extension modules are installed into the same location. 
+Files are installed as follows: -+------------------------------+---------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+===========================+=============================+ -| pure module distribution | :file:`{prefix}` | :option:`--install-purelib` | -+------------------------------+---------------------------+-----------------------------+ -| non-pure module distribution | :file:`{prefix}` | :option:`--install-platlib` | -+------------------------------+---------------------------+-----------------------------+ -| scripts | :file:`{prefix}\\Scripts` | :option:`--install-scripts` | -+------------------------------+---------------------------+-----------------------------+ -| data | :file:`{prefix}\\Data` | :option:`--install-data` | -+------------------------------+---------------------------+-----------------------------+ +=============== ========================================================== +Type of file Installation directory +=============== ========================================================== +modules :file:`{prefix}\\Lib\\site-packages` +scripts :file:`{prefix}\\Scripts` +data :file:`{prefix}` +C headers :file:`{prefix}\\Include\\{distname}` +=============== ========================================================== .. _inst-custom-install: @@ -460,13 +521,29 @@ or you might want to completely redefine the installation scheme. In either case, you're creating a *custom installation scheme*. -You probably noticed the column of "override options" in the tables describing -the alternate installation schemes above. Those options are how you define a -custom installation scheme. 
These override options can be relative, absolute, +To create a custom installation scheme, you start with one of the alternate +schemes and override some of the installation directories used for the various +types of files, using these options: + +====================== ======================= +Type of file Override option +====================== ======================= +Python modules ``--install-purelib`` +extension modules ``--install-platlib`` +all modules ``--install-lib`` +scripts ``--install-scripts`` +data ``--install-data`` +C headers ``--install-headers`` +====================== ======================= + +These override options can be relative, absolute, or explicitly defined in terms of one of the installation base directories. (There are two installation base directories, and they are normally the same--- they only differ when you use the Unix "prefix scheme" and supply different -:option:`--prefix` and :option:`--exec-prefix` options.) +``--prefix`` and ``--exec-prefix`` options; using ``--install-lib`` will +override values computed or given for ``--install-purelib`` and +``--install-platlib``, and is recommended for schemes that don't make a +difference between Python and extension modules.) For example, say you're installing a module distribution to your home directory under Unix---but you want scripts to go in :file:`~/scripts` rather than @@ -493,15 +570,16 @@ a subdirectory of :file:`{prefix}`, rather than right in :file:`{prefix}` itself. This is almost as easy as customizing the script installation directory ---you just have to remember that there are two types of modules to worry about, -pure modules and non-pure modules (i.e., modules from a non-pure distribution). 
-For example:: +Python and extension modules, which can conveniently be both controlled by one +option:: - python setup.py install --install-purelib=Site --install-platlib=Site + python setup.py install --install-lib=Site -The specified installation directories are relative to :file:`{prefix}`. Of -course, you also have to ensure that these directories are in Python's module -search path, such as by putting a :file:`.pth` file in :file:`{prefix}`. See -section :ref:`inst-search-path` to find out how to modify Python's search path. +The specified installation directory is relative to :file:`{prefix}`. Of +course, you also have to ensure that this directory is in Python's module +search path, such as by putting a :file:`.pth` file in a site directory (see +:mod:`site`). See section :ref:`inst-search-path` to find out how to modify +Python's search path. If you want to define an entire installation scheme, you just have to supply all of the installation directory options. The recommended way to do this is to @@ -553,8 +631,8 @@ python setup.py install --install-base=/tmp -would install pure modules to :file:`{/tmp/python/lib}` in the first case, and -to :file:`{/tmp/lib}` in the second case. (For the second case, you probably +would install pure modules to :file:`/tmp/python/lib` in the first case, and +to :file:`/tmp/lib` in the second case. (For the second case, you probably want to supply an installation base of :file:`/tmp/python`.) You probably noticed the use of ``$HOME`` and ``$PLAT`` in the sample @@ -571,7 +649,7 @@ needed on those platforms? -.. XXX I'm not sure where this section should go. +.. XXX Move this to Doc/using .. _inst-search-path: diff --git a/Doc/library/site.rst b/Doc/library/site.rst --- a/Doc/library/site.rst +++ b/Doc/library/site.rst @@ -129,12 +129,14 @@ :file:`~/Library/Python/{X.Y}` for Mac framework builds, and :file:`{%APPDATA%}\\Python` for Windows. 
This value is used by Distutils to compute the installation directories for scripts, data files, Python modules, - etc. See also :envvar:`PYTHONUSERBASE`. + etc. for the :ref:`user installation scheme `. See + also :envvar:`PYTHONUSERBASE`. .. function:: addsitedir(sitedir, known_paths=None) - Add a directory to sys.path and process its :file:`.pth` files. + Add a directory to sys.path and process its :file:`.pth` files. Typically + used in :mod:`sitecustomize` or :mod:`usercustomize` (see above). .. function:: getsitepackages() diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -481,7 +481,8 @@ Defines the :data:`user base directory `, which is used to compute the path of the :data:`user site-packages directory ` - and Distutils installation paths for ``python setup.py install --user``. + and :ref:`Distutils installation paths ` for ``python + setup.py install --user``. .. seealso:: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 18:11:15 2011 From: python-checkins at python.org (eric.araujo) Date: Tue, 09 Aug 2011 18:11:15 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?b?OiBNZXJnZSBkb2MgY2hhbmdlcyBmcm9tIDMuMiAoIzg2MTcsICMxMDc0NSku?= Message-ID: http://hg.python.org/cpython/rev/d354802a637d changeset: 71780:d354802a637d parent: 71748:0c1c9bb590a9 parent: 71779:1b0b5f644090 user: Éric Araujo date: Sat Aug 06 16:58:15 2011 +0200 summary: Merge doc changes from 3.2 (#8617, #10745). In the install and library docs, I changed the text to refer to packaging instead of distutils. I also checked that the documented paths correctly reflect what's really defined in sysconfig; the main difference with paths defined in distutils.install is that include directories don't end with the distribution name anymore (i.e. distutils uses include/python3.3/spam, sysconfig include/python3.3), I have no idea why. 
files: Doc/distutils/install.rst | 184 +++++++++++++++------ Doc/install/install.rst | 193 ++++++++++++++++------ Doc/library/site.rst | 133 ++++++++++---- Doc/packaging/commandref.rst | 15 +- Doc/tutorial/interactive.rst | 7 +- Doc/tutorial/interpreter.rst | 24 ++- Doc/using/cmdline.rst | 11 +- 7 files changed, 408 insertions(+), 159 deletions(-) diff --git a/Doc/distutils/install.rst b/Doc/distutils/install.rst --- a/Doc/distutils/install.rst +++ b/Doc/distutils/install.rst @@ -279,6 +279,14 @@ >>> sys.exec_prefix '/usr' +A few other placeholders are used in this document: :file:`{X.Y}` stands for the +version of Python, for example ``3.2``; :file:`{abiflags}` will be replaced by +the value of :data:`sys.abiflags` or the empty string for platforms which don't +define ABI flags; :file:`{distname}` will be replaced by the name of the module +distribution being installed. Dots and capitalization are important in the +paths; for example, a value that uses ``python3.2`` on UNIX will typically use +``Python32`` on Windows. + If you don't want to install modules to the standard location, or if you don't have permission to write there, then you need to read about alternate installations in section :ref:`inst-alt-install`. If you want to customize your @@ -307,8 +315,61 @@ differ across platforms, so read whichever of the following sections applies to you. +Note that the various alternate installation schemes are mutually exclusive: you +can pass ``--user``, or ``--home``, or ``--prefix`` and ``--exec-prefix``, or +``--install-base`` and ``--install-platbase``, but you can't mix from these +groups. -.. _inst-alt-install-prefix: + +.. _inst-alt-install-user: + +Alternate installation: the user scheme +--------------------------------------- + +This scheme is designed to be the most convenient solution for users that don't +have write permission to the global site-packages directory or don't want to +install into it. 
It is enabled with a simple option:: + + python setup.py install --user + +Files will be installed into subdirectories of :data:`site.USER_BASE` (written +as :file:`{userbase}` hereafter). This scheme installs pure Python modules and +extension modules in the same location (also known as :data:`site.USER_SITE`). +Here are the values for UNIX, including Mac OS X: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}/lib/python{X.Y}/site-packages` +scripts :file:`{userbase}/bin` +data :file:`{userbase}` +C headers :file:`{userbase}/include/python{X.Y}{abiflags}/{distname}` +=============== =========================================================== + +And here are the values used on Windows: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}\\Python{XY}\\site-packages` +scripts :file:`{userbase}\\Scripts` +data :file:`{userbase}` +C headers :file:`{userbase}\\Python{XY}\\Include\\{distname}` +=============== =========================================================== + +The advantage of using this scheme compared to the other ones described below is +that the user site-packages directory is under normal conditions always included +in :data:`sys.path` (see :mod:`site` for more information), which means that +there is no additional step to perform after running the :file:`setup.py` script +to finalize the installation. + +The :command:`build_ext` command also has a ``--user`` option to add +:file:`{userbase}/include` to the compiler search path for header files and +:file:`{userbase}/lib` to the compiler search path for libraries as well as to +the runtime search path for shared C libraries (rpath). + + +.. 
_inst-alt-install-home: Alternate installation: the home scheme --------------------------------------- @@ -330,23 +391,27 @@ python setup.py install --home=~ +To make Python find the distributions installed with this scheme, you may have +to :ref:`modify Python's search path ` or edit +:mod:`sitecustomize` (see :mod:`site`) to call :func:`site.addsitedir` or edit +:data:`sys.path`. + The :option:`--home` option defines the installation base directory. Files are installed to the following directories under the installation base as follows: -+------------------------------+---------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+===========================+=============================+ -| pure module distribution | :file:`{home}/lib/python` | :option:`--install-purelib` | -+------------------------------+---------------------------+-----------------------------+ -| non-pure module distribution | :file:`{home}/lib/python` | :option:`--install-platlib` | -+------------------------------+---------------------------+-----------------------------+ -| scripts | :file:`{home}/bin` | :option:`--install-scripts` | -+------------------------------+---------------------------+-----------------------------+ -| data | :file:`{home}/share` | :option:`--install-data` | -+------------------------------+---------------------------+-----------------------------+ +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{home}/lib/python` +scripts :file:`{home}/bin` +data :file:`{home}` +C headers :file:`{home}/include/python/{distname}` +=============== =========================================================== +(Mentally replace slashes with backslashes if you're on Windows.) -.. _inst-alt-install-home: + +.. 
_inst-alt-install-prefix-unix: Alternate installation: Unix (the prefix scheme) ------------------------------------------------ @@ -355,7 +420,7 @@ perform the build/install (i.e., to run the setup script), but install modules into the third-party module directory of a different Python installation (or something that looks like a different Python installation). If this sounds a -trifle unusual, it is---that's why the "home scheme" comes first. However, +trifle unusual, it is---that's why the user and home schemes come before. However, there are at least two known cases where the prefix scheme will be useful. First, consider that many Linux distributions put Python in :file:`/usr`, rather @@ -383,17 +448,15 @@ executables, etc.) If :option:`--exec-prefix` is not supplied, it defaults to :option:`--prefix`. Files are installed as follows: -+------------------------------+-----------------------------------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+=====================================================+=============================+ -| pure module distribution | :file:`{prefix}/lib/python{X.Y}/site-packages` | :option:`--install-purelib` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| non-pure module distribution | :file:`{exec-prefix}/lib/python{X.Y}/site-packages` | :option:`--install-platlib` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| scripts | :file:`{prefix}/bin` | :option:`--install-scripts` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| data | :file:`{prefix}/share` | :option:`--install-data` | -+------------------------------+-----------------------------------------------------+-----------------------------+ +================= 
========================================================== +Type of file Installation directory +================= ========================================================== +Python modules :file:`{prefix}/lib/python{X.Y}/site-packages` +extension modules :file:`{exec-prefix}/lib/python{X.Y}/site-packages` +scripts :file:`{prefix}/bin` +data :file:`{prefix}` +C headers :file:`{prefix}/include/python{X.Y}{abiflags}/{distname}` +================= ========================================================== There is no requirement that :option:`--prefix` or :option:`--exec-prefix` actually point to an alternate Python installation; if the directories listed @@ -418,7 +481,7 @@ alternate Python installation, this is immaterial.) -.. _inst-alt-install-windows: +.. _inst-alt-install-prefix-windows: Alternate installation: Windows (the prefix scheme) --------------------------------------------------- @@ -433,20 +496,18 @@ to install modules to the :file:`\\Temp\\Python` directory on the current drive. The installation base is defined by the :option:`--prefix` option; the -:option:`--exec-prefix` option is not supported under Windows. Files are -installed as follows: +:option:`--exec-prefix` option is not supported under Windows, which means that +pure Python modules and extension modules are installed into the same location. 
+Files are installed as follows: -+------------------------------+---------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+===========================+=============================+ -| pure module distribution | :file:`{prefix}` | :option:`--install-purelib` | -+------------------------------+---------------------------+-----------------------------+ -| non-pure module distribution | :file:`{prefix}` | :option:`--install-platlib` | -+------------------------------+---------------------------+-----------------------------+ -| scripts | :file:`{prefix}\\Scripts` | :option:`--install-scripts` | -+------------------------------+---------------------------+-----------------------------+ -| data | :file:`{prefix}\\Data` | :option:`--install-data` | -+------------------------------+---------------------------+-----------------------------+ +=============== ========================================================== +Type of file Installation directory +=============== ========================================================== +modules :file:`{prefix}\\Lib\\site-packages` +scripts :file:`{prefix}\\Scripts` +data :file:`{prefix}` +C headers :file:`{prefix}\\Include\\{distname}` +=============== ========================================================== .. _inst-custom-install: @@ -460,13 +521,29 @@ or you might want to completely redefine the installation scheme. In either case, you're creating a *custom installation scheme*. -You probably noticed the column of "override options" in the tables describing -the alternate installation schemes above. Those options are how you define a -custom installation scheme. 
These override options can be relative, absolute, +To create a custom installation scheme, you start with one of the alternate +schemes and override some of the installation directories used for the various +types of files, using these options: + +====================== ======================= +Type of file Override option +====================== ======================= +Python modules ``--install-purelib`` +extension modules ``--install-platlib`` +all modules ``--install-lib`` +scripts ``--install-scripts`` +data ``--install-data`` +C headers ``--install-headers`` +====================== ======================= + +These override options can be relative, absolute, or explicitly defined in terms of one of the installation base directories. (There are two installation base directories, and they are normally the same--- they only differ when you use the Unix "prefix scheme" and supply different -:option:`--prefix` and :option:`--exec-prefix` options.) +``--prefix`` and ``--exec-prefix`` options; using ``--install-lib`` will +override values computed or given for ``--install-purelib`` and +``--install-platlib``, and is recommended for schemes that don't make a +difference between Python and extension modules.) For example, say you're installing a module distribution to your home directory under Unix---but you want scripts to go in :file:`~/scripts` rather than @@ -493,15 +570,16 @@ a subdirectory of :file:`{prefix}`, rather than right in :file:`{prefix}` itself. This is almost as easy as customizing the script installation directory ---you just have to remember that there are two types of modules to worry about, -pure modules and non-pure modules (i.e., modules from a non-pure distribution). 
-For example:: +Python and extension modules, which can conveniently be both controlled by one +option:: - python setup.py install --install-purelib=Site --install-platlib=Site + python setup.py install --install-lib=Site -The specified installation directories are relative to :file:`{prefix}`. Of -course, you also have to ensure that these directories are in Python's module -search path, such as by putting a :file:`.pth` file in :file:`{prefix}`. See -section :ref:`inst-search-path` to find out how to modify Python's search path. +The specified installation directory is relative to :file:`{prefix}`. Of +course, you also have to ensure that this directory is in Python's module +search path, such as by putting a :file:`.pth` file in a site directory (see +:mod:`site`). See section :ref:`inst-search-path` to find out how to modify +Python's search path. If you want to define an entire installation scheme, you just have to supply all of the installation directory options. The recommended way to do this is to @@ -553,8 +631,8 @@ python setup.py install --install-base=/tmp -would install pure modules to :file:`{/tmp/python/lib}` in the first case, and -to :file:`{/tmp/lib}` in the second case. (For the second case, you probably +would install pure modules to :file:`/tmp/python/lib` in the first case, and +to :file:`/tmp/lib` in the second case. (For the second case, you probably want to supply an installation base of :file:`/tmp/python`.) You probably noticed the use of ``$HOME`` and ``$PLAT`` in the sample @@ -571,7 +649,7 @@ needed on those platforms? -.. XXX I'm not sure where this section should go. +.. XXX Move this to Doc/using .. 
_inst-search-path: diff --git a/Doc/install/install.rst b/Doc/install/install.rst --- a/Doc/install/install.rst +++ b/Doc/install/install.rst @@ -293,6 +293,14 @@ >>> sys.exec_prefix '/usr' +A few other placeholders are used in this document: :file:`{X.Y}` stands for the +version of Python, for example ``3.2``; :file:`{abiflags}` will be replaced by +the value of :data:`sys.abiflags` or the empty string for platforms which don't +define ABI flags; :file:`{distname}` will be replaced by the name of the module +distribution being installed. Dots and capitalization are important in the +paths; for example, a value that uses ``python3.2`` on UNIX will typically use +``Python32`` on Windows. + If you don't want to install modules to the standard location, or if you don't have permission to write there, then you need to read about alternate installations in section :ref:`packaging-alt-install`. If you want to customize your @@ -320,8 +328,72 @@ differ across platforms, so read whichever of the following sections applies to you. +Note that the various alternate installation schemes are mutually exclusive: you +can pass ``--user``, or ``--home``, or ``--prefix`` and ``--exec-prefix``, or +``--install-base`` and ``--install-platbase``, but you can't mix from these +groups. -.. _packaging-alt-install-prefix: + +.. _packaging-alt-install-user: + +Alternate installation: the user scheme +--------------------------------------- + +This scheme is designed to be the most convenient solution for users that don't +have write permission to the global site-packages directory or don't want to +install into it. It is enabled with a simple option:: + + pysetup run install_dist --user + +Files will be installed into subdirectories of :data:`site.USER_BASE` (written +as :file:`{userbase}` hereafter). This scheme installs pure Python modules and +extension modules in the same location (also known as :data:`site.USER_SITE`). 
+Here are the values for UNIX, including non-framework builds on Mac OS X: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}/lib/python{X.Y}/site-packages` +scripts :file:`{userbase}/bin` +data :file:`{userbase}` +C headers :file:`{userbase}/include/python{X.Y}` +=============== =========================================================== + +Framework builds on Mac OS X use these paths: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}/lib/python/site-packages` +scripts :file:`{userbase}/bin` +data :file:`{userbase}` +C headers :file:`{userbase}/include/python` +=============== =========================================================== + +And here are the values used on Windows: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}\\Python{XY}\\site-packages` +scripts :file:`{userbase}\\Scripts` +data :file:`{userbase}` +C headers :file:`{userbase}\\Python{XY}\\Include` +=============== =========================================================== + +The advantage of using this scheme compared to the other ones described below is +that the user site-packages directory is under normal conditions always included +in :data:`sys.path` (see :mod:`site` for more information), which means that +there is no additional step to perform after running ``pysetup`` to finalize the +installation. 
+ +The :command:`build_ext` command also has a ``--user`` option to add +:file:`{userbase}/include` to the compiler search path for header files and +:file:`{userbase}/lib` to the compiler search path for libraries as well as to +the runtime search path for shared C libraries (rpath). + + +.. _packaging-alt-install-home: Alternate installation: the home scheme --------------------------------------- @@ -343,23 +415,27 @@ pysetup run install_dist --home ~ +To make Python find the distributions installed with this scheme, you may have +to :ref:`modify Python's search path ` or edit +:mod:`sitecustomize` (see :mod:`site`) to call :func:`site.addsitedir` or edit +:data:`sys.path`. + The :option:`--home` option defines the base directory for the installation. Under it, files are installed to the following directories: -+------------------------------+---------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+===========================+=============================+ -| pure module distribution | :file:`{home}/lib/python` | :option:`--install-purelib` | -+------------------------------+---------------------------+-----------------------------+ -| non-pure module distribution | :file:`{home}/lib/python` | :option:`--install-platlib` | -+------------------------------+---------------------------+-----------------------------+ -| scripts | :file:`{home}/bin` | :option:`--install-scripts` | -+------------------------------+---------------------------+-----------------------------+ -| data | :file:`{home}/share` | :option:`--install-data` | -+------------------------------+---------------------------+-----------------------------+ +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{home}/lib/python` +scripts :file:`{home}/bin` 
+data :file:`{home}` +C headers :file:`{home}/include/python` +=============== =========================================================== +(Mentally replace slashes with backslashes if you're on Windows.) -.. _packaging-alt-install-home: + +.. _packaging-alt-install-prefix-unix: Alternate installation: Unix (the prefix scheme) ------------------------------------------------ @@ -368,8 +444,8 @@ run the build command, but install modules into the third-party module directory of a different Python installation (or something that looks like a different Python installation). If this sounds a trifle unusual, it is ---that's why the -"home scheme" comes first. However, there are at least two known cases where the -prefix scheme will be useful. +user and home schemes come before. However, there are at least two known cases +where the prefix scheme will be useful. First, consider that many Linux distributions put Python in :file:`/usr`, rather than the more traditional :file:`/usr/local`. This is entirely appropriate, @@ -396,17 +472,17 @@ executables, etc.) If :option:`--exec-prefix` is not supplied, it defaults to :option:`--prefix`. 
Files are installed as follows: -+------------------------------+-----------------------------------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+=====================================================+=============================+ -| pure module distribution | :file:`{prefix}/lib/python{X.Y}/site-packages` | :option:`--install-purelib` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| non-pure module distribution | :file:`{exec-prefix}/lib/python{X.Y}/site-packages` | :option:`--install-platlib` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| scripts | :file:`{prefix}/bin` | :option:`--install-scripts` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| data | :file:`{prefix}/share` | :option:`--install-data` | -+------------------------------+-----------------------------------------------------+-----------------------------+ +================= ========================================================== +Type of file Installation directory +================= ========================================================== +Python modules :file:`{prefix}/lib/python{X.Y}/site-packages` +extension modules :file:`{exec-prefix}/lib/python{X.Y}/site-packages` +scripts :file:`{prefix}/bin` +data :file:`{prefix}` +C headers :file:`{prefix}/include/python{X.Y}{abiflags}` +================= ========================================================== + +.. XXX misses an entry for platinclude There is no requirement that :option:`--prefix` or :option:`--exec-prefix` actually point to an alternate Python installation; if the directories listed @@ -432,7 +508,7 @@ this is immaterial.) -.. _packaging-alt-install-windows: +.. 
_packaging-alt-install-prefix-windows: Alternate installation: Windows (the prefix scheme) --------------------------------------------------- @@ -447,20 +523,18 @@ to install modules to the :file:`\\Temp\\Python` directory on the current drive. The installation base is defined by the :option:`--prefix` option; the -:option:`--exec-prefix` option is unsupported under Windows. Files are -installed as follows: +:option:`--exec-prefix` option is not supported under Windows, which means that +pure Python modules and extension modules are installed into the same location. +Files are installed as follows: -+------------------------------+---------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+===========================+=============================+ -| pure module distribution | :file:`{prefix}` | :option:`--install-purelib` | -+------------------------------+---------------------------+-----------------------------+ -| non-pure module distribution | :file:`{prefix}` | :option:`--install-platlib` | -+------------------------------+---------------------------+-----------------------------+ -| scripts | :file:`{prefix}\\Scripts` | :option:`--install-scripts` | -+------------------------------+---------------------------+-----------------------------+ -| data | :file:`{prefix}\\Data` | :option:`--install-data` | -+------------------------------+---------------------------+-----------------------------+ +=============== ========================================================== +Type of file Installation directory +=============== ========================================================== +modules :file:`{prefix}\\Lib\\site-packages` +scripts :file:`{prefix}\\Scripts` +data :file:`{prefix}` +C headers :file:`{prefix}\\Include` +=============== ========================================================== .. 
_packaging-custom-install: @@ -474,13 +548,29 @@ directory, or you might want to completely redefine the installation scheme. In either case, you're creating a *custom installation scheme*. -You probably noticed the column of "override options" in the tables describing -the alternate installation schemes above. Those options are how you define a -custom installation scheme. These override options can be relative, absolute, +To create a custom installation scheme, you start with one of the alternate +schemes and override some of the installation directories used for the various +types of files, using these options: + +====================== ======================= +Type of file Override option +====================== ======================= +Python modules ``--install-purelib`` +extension modules ``--install-platlib`` +all modules ``--install-lib`` +scripts ``--install-scripts`` +data ``--install-data`` +C headers ``--install-headers`` +====================== ======================= + +These override options can be relative, absolute, or explicitly defined in terms of one of the installation base directories. (There are two installation base directories, and they are normally the same ---they only differ when you use the Unix "prefix scheme" and supply different -:option:`--prefix` and :option:`--exec-prefix` options.) +``--prefix`` and ``--exec-prefix`` options; using ``--install-lib`` will +override values computed or given for ``--install-purelib`` and +``--install-platlib``, and is recommended for schemes that don't make a +difference between Python and extension modules.) For example, say you're installing a module distribution to your home directory under Unix, but you want scripts to go in :file:`~/scripts` rather than @@ -507,17 +597,18 @@ a subdirectory of :file:`{prefix}`, rather than right in :file:`{prefix}` itself. 
This is almost as easy as customizing the script installation directory ---you just have to remember that there are two types of modules to worry about, -pure modules and non-pure modules (i.e., modules from a non-pure distribution). -For example:: +Python and extension modules, which can conveniently be both controlled by one +option:: - pysetup run install_dist --install-purelib Site --install-platlib Site + pysetup run install_dist --install-lib Site .. XXX Nothing is installed right under prefix in windows, is it?? -The specified installation directories are relative to :file:`{prefix}`. Of -course, you also have to ensure that these directories are in Python's module -search path, such as by putting a :file:`.pth` file in :file:`{prefix}`. See -section :ref:`packaging-search-path` to find out how to modify Python's search path. +The specified installation directory is relative to :file:`{prefix}`. Of +course, you also have to ensure that this directory is in Python's module +search path, such as by putting a :file:`.pth` file in a site directory (see +:mod:`site`). See section :ref:`packaging-search-path` to find out how to modify +Python's search path. If you want to define an entire installation scheme, you just have to supply all of the installation directory options. Using relative paths is recommended here. diff --git a/Doc/library/site.rst b/Doc/library/site.rst --- a/Doc/library/site.rst +++ b/Doc/library/site.rst @@ -2,22 +2,24 @@ ================================================ .. module:: site - :synopsis: A standard way to reference site-specific modules. + :synopsis: Module responsible for site-specific configuration. **Source code:** :source:`Lib/site.py` -------------- +.. highlightlang:: none + **This module is automatically imported during initialization.** The automatic import can be suppressed using the interpreter's :option:`-S` option. .. 
index:: triple: module; search; path -Importing this module will append site-specific paths to the module search -path, unless :option:`-S` was used. In that case, this module can be safely -imported with no automatic modifications to the module search path. To -explicitly trigger the usual site-specific additions, call the -:func:`site.main` function. +Importing this module will append site-specific paths to the module search path +and add a few builtins, unless :option:`-S` was used. In that case, this module +can be safely imported with no automatic modifications to the module search path +or additions to the builtins. To explicitly trigger the usual site-specific +additions, call the :func:`site.main` function. .. versionchanged:: 3.3 Importing the module used to trigger paths manipulation even when using @@ -36,11 +38,11 @@ if it refers to an existing directory, and if so, adds it to ``sys.path`` and also inspects the newly added path for configuration files. -A path configuration file is a file whose name has the form :file:`package.pth` +A path configuration file is a file whose name has the form :file:`{name}.pth` and exists in one of the four directories mentioned above; its contents are additional items (one per line) to be added to ``sys.path``. Non-existing items -are never added to ``sys.path``, but no check is made that the item refers to a -directory (rather than a file). No item is added to ``sys.path`` more than +are never added to ``sys.path``, and no check is made that the item refers to a +directory rather than a file. No item is added to ``sys.path`` more than once. Blank lines and lines beginning with ``#`` are skipped. Lines starting with ``import`` (followed by space or tab) are executed. @@ -50,8 +52,7 @@ For example, suppose ``sys.prefix`` and ``sys.exec_prefix`` are set to :file:`/usr/local`. 
The Python X.Y library is then installed in -:file:`/usr/local/lib/python{X.Y}` (where only the first three characters of -``sys.version`` are used to form the installation path name). Suppose this has +:file:`/usr/local/lib/python{X.Y}`. Suppose this has a subdirectory :file:`/usr/local/lib/python{X.Y}/site-packages` with three subsubdirectories, :file:`foo`, :file:`bar` and :file:`spam`, and two path configuration files, :file:`foo.pth` and :file:`bar.pth`. Assume @@ -84,42 +85,59 @@ After these path manipulations, an attempt is made to import a module named :mod:`sitecustomize`, which can perform arbitrary site-specific customizations. -If this import fails with an :exc:`ImportError` exception, it is silently -ignored. +It is typically created by a system administrator in the site-packages +directory. If this import fails with an :exc:`ImportError` exception, it is +silently ignored. -.. index:: module: sitecustomize +.. index:: module: usercustomize + +After this, an attempt is made to import a module named :mod:`usercustomize`, +which can perform arbitrary user-specific customizations, if +:data:`ENABLE_USER_SITE` is true. This file is intended to be created in the +user site-packages directory (see below), which is part of ``sys.path`` unless +disabled by :option:`-s`. An :exc:`ImportError` will be silently ignored. Note that for some non-Unix systems, ``sys.prefix`` and ``sys.exec_prefix`` are empty, and the path manipulations are skipped; however the import of -:mod:`sitecustomize` is still attempted. +:mod:`sitecustomize` and :mod:`usercustomize` is still attempted. .. data:: PREFIXES - A list of prefixes for site package directories + A list of prefixes for site-packages directories. .. data:: ENABLE_USER_SITE - Flag showing the status of the user site directory. True means the - user site directory is enabled and added to sys.path. When the flag - is None the user site directory is disabled for security reasons. 
+ Flag showing the status of the user site-packages directory. ``True`` means + that it is enabled and was added to ``sys.path``. ``False`` means that it + was disabled by user request (with :option:`-s` or + :envvar:`PYTHONNOUSERSITE`). ``None`` means it was disabled for security + reasons (mismatch between user or group id and effective id) or by an + administrator. .. data:: USER_SITE - Path to the user site directory for the current Python version or None + Path to the user site-packages for the running Python. Can be ``None`` if + :func:`getusersitepackages` hasn't been called yet. Default value is + :file:`~/.local/lib/python{X.Y}/site-packages` for UNIX and non-framework Mac + OS X builds, :file:`~/Library/Python/{X.Y}/lib/python/site-packages` for Mac + framework builds, and :file:`{%APPDATA%}\\Python\\Python{XY}\\site-packages` + on Windows. This directory is a site directory, which means that + :file:`.pth` files in it will be processed. .. data:: USER_BASE - Path to the base directory for user site directories - - -.. envvar:: PYTHONNOUSERSITE - - -.. envvar:: PYTHONUSERBASE + Path to the base directory for the user site-packages. Can be ``None`` if + :func:`getuserbase` hasn't been called yet. Default value is + :file:`~/.local` for UNIX and Mac OS X non-framework builds, + :file:`~/Library/Python/{X.Y}` for Mac framework builds, and + :file:`{%APPDATA%}\\Python` for Windows. This value is used by Packaging to + compute the installation directories for scripts, data files, Python modules, + etc. for the :ref:`user installation scheme `. + See also :envvar:`PYTHONUSERBASE`. .. function:: main() @@ -135,34 +153,67 @@ .. function:: addsitedir(sitedir, known_paths=None) - Adds a directory to sys.path and processes its pth files. + Add a directory to sys.path and process its :file:`.pth` files. Typically + used in :mod:`sitecustomize` or :mod:`usercustomize` (see above). + .. 
function:: getsitepackages() - Returns a list containing all global site-packages directories - (and possibly site-python). + Return a list containing all global site-packages directories (and possibly + site-python). .. versionadded:: 3.2 + .. function:: getuserbase() - Returns the "user base" directory path. - - The "user base" directory can be used to store data. If the global - variable ``USER_BASE`` is not initialized yet, this function will also set - it. + Return the path of the user base directory, :data:`USER_BASE`. If it is not + initialized yet, this function will also set it, respecting + :envvar:`PYTHONUSERBASE`. .. versionadded:: 3.2 + .. function:: getusersitepackages() - Returns the user-specific site-packages directory path. - - If the global variable ``USER_SITE`` is not initialized yet, this - function will also set it. + Return the path of the user-specific site-packages directory, + :data:`USER_SITE`. If it is not initialized yet, this function will also set + it, respecting :envvar:`PYTHONNOUSERSITE` and :data:`USER_BASE`. .. versionadded:: 3.2 -.. XXX Update documentation -.. XXX document python -m site --user-base --user-site +The :mod:`site` module also provides a way to get the user directories from the +command line: + +.. code-block:: sh + + $ python3 -m site --user-site + /home/user/.local/lib/python3.3/site-packages + +.. program:: site + +If it is called without arguments, it will print the contents of +:data:`sys.path` on the standard output, followed by the value of +:data:`USER_BASE` and whether the directory exists, then the same thing for +:data:`USER_SITE`, and finally the value of :data:`ENABLE_USER_SITE`. + +.. cmdoption:: --user-base + + Print the path to the user base directory. + +.. cmdoption:: --user-site + + Print the path to the user site-packages directory. + +If both options are given, user base and user site will be printed (always in +this order), separated by :data:`os.pathsep`. 
+ +If any option is given, the script will exit with one of these values: ``0`` if the user site-packages directory is enabled, ``1`` if it was disabled by the user, ``2`` if it is disabled for security reasons or by an administrator, and a value greater than 2 if there is an error. + +.. seealso:: + + :pep:`370` -- Per user site-packages directory diff --git a/Doc/packaging/commandref.rst b/Doc/packaging/commandref.rst --- a/Doc/packaging/commandref.rst +++ b/Doc/packaging/commandref.rst @@ -12,6 +12,12 @@ description. Use pysetup run --help to get help about the options of one command. +.. XXX sections from this document should be merged with other docs (e.g. check + and upload with uploading.rst, install_* with install/install.rst, etc.); + there is no value in partially duplicating information. this file could + however serve as an index, i.e. just a list of all commands with links to + every section that describes options or usage + Preparing distributions ======================= @@ -310,13 +316,8 @@ ----------------------- Install a distribution, delegating to the other :command:`install_*` commands to -do the work. - -.. program:: packaging install_dist - -.. cmdoption:: --user - - Install in user site-packages directory (see :PEP:`370`). +do the work. See :ref:`packaging-how-install-works` for complete usage +instructions. :command:`install_data` diff --git a/Doc/tutorial/interactive.rst b/Doc/tutorial/interactive.rst --- a/Doc/tutorial/interactive.rst +++ b/Doc/tutorial/interactive.rst @@ -156,17 +156,18 @@ quotes, etc., would also be useful. One alternative enhanced interactive interpreter that has been around for quite -some time is `IPython`_, which features tab completion, object exploration and +some time is IPython_, which features tab completion, object exploration and advanced history management. It can also be thoroughly customized and embedded into other applications. Another similar enhanced interactive environment is -`bpython`_. +bpython_.
.. rubric:: Footnotes .. [#] Python will execute the contents of a file identified by the :envvar:`PYTHONSTARTUP` environment variable when you start an interactive - interpreter. + interpreter. To customize Python even for non-interactive mode, see + :ref:`tut-customize`. .. _GNU Readline: http://tiswww.case.edu/php/chet/readline/rltop.html diff --git a/Doc/tutorial/interpreter.rst b/Doc/tutorial/interpreter.rst --- a/Doc/tutorial/interpreter.rst +++ b/Doc/tutorial/interpreter.rst @@ -236,6 +236,29 @@ exec(open(filename).read()) +.. _tut-customize: + +The Customization Modules +------------------------- + +Python provides two hooks to let you customize it: :mod:`sitecustomize` and +:mod:`usercustomize`. To see how it works, you need first to find the location +of your user site-packages directory. Start Python and run this code: + + >>> import site + >>> site.getusersitepackages() + '/home/user/.local/lib/python3.2/site-packages' + +Now you can create a file named :file:`usercustomize.py` in that directory and +put anything you want in it. It will affect every invocation of Python, unless +it is started with the :option:`-s` option to disable the automatic import. + +:mod:`sitecustomize` works in the same way, but is typically created by an +administrator of the computer in the global site-packages directory, and is +imported before :mod:`usercustomize`. See the documentation of the :mod:`site` +module for more details. + + .. rubric:: Footnotes .. [#] On Unix, the Python 3.x interpreter is by default not installed with the @@ -243,4 +266,3 @@ simultaneously installed Python 2.x executable. .. [#] A problem with the GNU Readline package may prevent this. - diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -229,7 +229,8 @@ .. cmdoption:: -s - Don't add user site directory to sys.path + Don't add the :data:`user site-packages directory ` to + :data:`sys.path`. .. seealso:: @@ -470,7 +471,8 @@ .. 
envvar:: PYTHONNOUSERSITE - If this is set, Python won't add the user site directory to sys.path + If this is set, Python won't add the :data:`user site-packages directory + ` to :data:`sys.path`. .. seealso:: @@ -479,7 +481,10 @@ .. envvar:: PYTHONUSERBASE - Sets the base directory for the user site directory + Defines the :data:`user base directory `, which is used to + compute the path of the :data:`user site-packages directory ` + and :ref:`Packaging installation paths ` for + ``pysetup run install_dist --user``. .. seealso:: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 18:11:16 2011 From: python-checkins at python.org (eric.araujo) Date: Tue, 09 Aug 2011 18:11:16 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Clean_up_shutil=2Edisk=5Fus?= =?utf8?q?age=2E?= Message-ID: http://hg.python.org/cpython/rev/f1e761dea2ee changeset: 71781:f1e761dea2ee user: Éric Araujo date: Mon Aug 08 16:51:11 2011 +0200 summary: Clean up shutil.disk_usage. - Move a test from call time to define time - Add the function name to __all__ - Improve docstring and docs A few lines are now duplicated (named tuple definition and docstring) but I think the end result reads better. files: Doc/library/shutil.rst | 5 ++- Lib/shutil.py | 44 ++++++++++++++++++++--------- 2 files changed, 33 insertions(+), 16 deletions(-) diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -175,8 +175,9 @@ .. function:: disk_usage(path) - Return disk usage statistics about the given path as a namedtuple including - total, used and free space expressed in bytes. + Return disk usage statistics about the given path as a :term:`named tuple` + with the attributes *total*, *used* and *free*, which are the amount of + total, used and free space, in bytes. ..
versionadded:: 3.3 diff --git a/Lib/shutil.py b/Lib/shutil.py --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -12,7 +12,6 @@ import collections import errno import tarfile -from collections import namedtuple try: import bz2 @@ -36,6 +35,7 @@ "register_archive_format", "unregister_archive_format", "get_unpack_formats", "register_unpack_format", "unregister_unpack_format", "unpack_archive"] + # disk_usage is added later, if available on the platform class Error(EnvironmentError): pass @@ -756,20 +756,36 @@ kwargs = dict(_UNPACK_FORMATS[format][2]) func(filename, extract_dir, **kwargs) -if hasattr(os, "statvfs") or os.name == 'nt': - _ntuple_diskusage = namedtuple('usage', 'total used free') + +if hasattr(os, 'statvfs'): + + __all__.append('disk_usage') + _ntuple_diskusage = collections.namedtuple('usage', 'total used free') def disk_usage(path): - """Return disk usage statistics about the given path as a namedtuple - including total, used and free space expressed in bytes. + """Return disk usage statistics about the given path. + + Returned value is a named tuple with attributes 'total', 'used' and + 'free', which are the amount of total, used and free space, in bytes. """ - if hasattr(os, "statvfs"): - st = os.statvfs(path) - free = (st.f_bavail * st.f_frsize) - total = (st.f_blocks * st.f_frsize) - used = (st.f_blocks - st.f_bfree) * st.f_frsize - else: - import nt - total, free = nt._getdiskusage(path) - used = total - free + st = os.statvfs(path) + free = st.f_bavail * st.f_frsize + total = st.f_blocks * st.f_frsize + used = (st.f_blocks - st.f_bfree) * st.f_frsize return _ntuple_diskusage(total, used, free) + +elif os.name == 'nt': + + import nt + __all__.append('disk_usage') + _ntuple_diskusage = collections.namedtuple('usage', 'total used free') + + def disk_usage(path): + """Return disk usage statistics about the given path.
+ + Returned value is a named tuple with attributes 'total', 'used' and + 'free', which are the amount of total, used and free space, in bytes. + """ + total, free = nt._getdiskusage(path) + used = total - free + return _ntuple_diskusage(total, used, free) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 18:11:17 2011 From: python-checkins at python.org (eric.araujo) Date: Tue, 09 Aug 2011 18:11:17 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_Branch_merge?= Message-ID: http://hg.python.org/cpython/rev/619889035734 changeset: 71782:619889035734 parent: 71773:0c922e7d26e1 parent: 71781:f1e761dea2ee user: Éric Araujo date: Mon Aug 08 16:56:17 2011 +0200 summary: Branch merge files: Doc/distutils/install.rst | 184 +++++++++++++++------ Doc/install/install.rst | 193 ++++++++++++++++------ Doc/library/shutil.rst | 5 +- Doc/library/site.rst | 133 ++++++++++---- Doc/packaging/commandref.rst | 15 +- Doc/tutorial/interactive.rst | 7 +- Doc/tutorial/interpreter.rst | 24 ++- Doc/using/cmdline.rst | 11 +- Lib/shutil.py | 44 +++- 9 files changed, 441 insertions(+), 175 deletions(-) diff --git a/Doc/distutils/install.rst b/Doc/distutils/install.rst --- a/Doc/distutils/install.rst +++ b/Doc/distutils/install.rst @@ -279,6 +279,14 @@ >>> sys.exec_prefix '/usr' +A few other placeholders are used in this document: :file:`{X.Y}` stands for the +version of Python, for example ``3.2``; :file:`{abiflags}` will be replaced by +the value of :data:`sys.abiflags` or the empty string for platforms which don't +define ABI flags; :file:`{distname}` will be replaced by the name of the module +distribution being installed. Dots and capitalization are important in the +paths; for example, a value that uses ``python3.2`` on UNIX will typically use +``Python32`` on Windows.
+ If you don't want to install modules to the standard location, or if you don't have permission to write there, then you need to read about alternate installations in section :ref:`inst-alt-install`. If you want to customize your @@ -307,8 +315,61 @@ differ across platforms, so read whichever of the following sections applies to you. +Note that the various alternate installation schemes are mutually exclusive: you +can pass ``--user``, or ``--home``, or ``--prefix`` and ``--exec-prefix``, or +``--install-base`` and ``--install-platbase``, but you can't mix from these +groups. -.. _inst-alt-install-prefix: + +.. _inst-alt-install-user: + +Alternate installation: the user scheme +--------------------------------------- + +This scheme is designed to be the most convenient solution for users that don't +have write permission to the global site-packages directory or don't want to +install into it. It is enabled with a simple option:: + + python setup.py install --user + +Files will be installed into subdirectories of :data:`site.USER_BASE` (written +as :file:`{userbase}` hereafter). This scheme installs pure Python modules and +extension modules in the same location (also known as :data:`site.USER_SITE`). 
+Here are the values for UNIX, including Mac OS X: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}/lib/python{X.Y}/site-packages` +scripts :file:`{userbase}/bin` +data :file:`{userbase}` +C headers :file:`{userbase}/include/python{X.Y}{abiflags}/{distname}` +=============== =========================================================== + +And here are the values used on Windows: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}\\Python{XY}\\site-packages` +scripts :file:`{userbase}\\Scripts` +data :file:`{userbase}` +C headers :file:`{userbase}\\Python{XY}\\Include\\{distname}` +=============== =========================================================== + +The advantage of using this scheme compared to the other ones described below is +that the user site-packages directory is under normal conditions always included +in :data:`sys.path` (see :mod:`site` for more information), which means that +there is no additional step to perform after running the :file:`setup.py` script +to finalize the installation. + +The :command:`build_ext` command also has a ``--user`` option to add +:file:`{userbase}/include` to the compiler search path for header files and +:file:`{userbase}/lib` to the compiler search path for libraries as well as to +the runtime search path for shared C libraries (rpath). + + +.. 
_inst-alt-install-home: Alternate installation: the home scheme --------------------------------------- @@ -330,23 +391,27 @@ python setup.py install --home=~ +To make Python find the distributions installed with this scheme, you may have +to :ref:`modify Python's search path ` or edit +:mod:`sitecustomize` (see :mod:`site`) to call :func:`site.addsitedir` or edit +:data:`sys.path`. + The :option:`--home` option defines the installation base directory. Files are installed to the following directories under the installation base as follows: -+------------------------------+---------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+===========================+=============================+ -| pure module distribution | :file:`{home}/lib/python` | :option:`--install-purelib` | -+------------------------------+---------------------------+-----------------------------+ -| non-pure module distribution | :file:`{home}/lib/python` | :option:`--install-platlib` | -+------------------------------+---------------------------+-----------------------------+ -| scripts | :file:`{home}/bin` | :option:`--install-scripts` | -+------------------------------+---------------------------+-----------------------------+ -| data | :file:`{home}/share` | :option:`--install-data` | -+------------------------------+---------------------------+-----------------------------+ +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{home}/lib/python` +scripts :file:`{home}/bin` +data :file:`{home}` +C headers :file:`{home}/include/python/{distname}` +=============== =========================================================== +(Mentally replace slashes with backslashes if you're on Windows.) -.. _inst-alt-install-home: + +.. 
_inst-alt-install-prefix-unix: Alternate installation: Unix (the prefix scheme) ------------------------------------------------ @@ -355,7 +420,7 @@ perform the build/install (i.e., to run the setup script), but install modules into the third-party module directory of a different Python installation (or something that looks like a different Python installation). If this sounds a -trifle unusual, it is---that's why the "home scheme" comes first. However, +trifle unusual, it is---that's why the user and home schemes come before. However, there are at least two known cases where the prefix scheme will be useful. First, consider that many Linux distributions put Python in :file:`/usr`, rather @@ -383,17 +448,15 @@ executables, etc.) If :option:`--exec-prefix` is not supplied, it defaults to :option:`--prefix`. Files are installed as follows: -+------------------------------+-----------------------------------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+=====================================================+=============================+ -| pure module distribution | :file:`{prefix}/lib/python{X.Y}/site-packages` | :option:`--install-purelib` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| non-pure module distribution | :file:`{exec-prefix}/lib/python{X.Y}/site-packages` | :option:`--install-platlib` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| scripts | :file:`{prefix}/bin` | :option:`--install-scripts` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| data | :file:`{prefix}/share` | :option:`--install-data` | -+------------------------------+-----------------------------------------------------+-----------------------------+ +================= 
========================================================== +Type of file Installation directory +================= ========================================================== +Python modules :file:`{prefix}/lib/python{X.Y}/site-packages` +extension modules :file:`{exec-prefix}/lib/python{X.Y}/site-packages` +scripts :file:`{prefix}/bin` +data :file:`{prefix}` +C headers :file:`{prefix}/include/python{X.Y}{abiflags}/{distname}` +================= ========================================================== There is no requirement that :option:`--prefix` or :option:`--exec-prefix` actually point to an alternate Python installation; if the directories listed @@ -418,7 +481,7 @@ alternate Python installation, this is immaterial.) -.. _inst-alt-install-windows: +.. _inst-alt-install-prefix-windows: Alternate installation: Windows (the prefix scheme) --------------------------------------------------- @@ -433,20 +496,18 @@ to install modules to the :file:`\\Temp\\Python` directory on the current drive. The installation base is defined by the :option:`--prefix` option; the -:option:`--exec-prefix` option is not supported under Windows. Files are -installed as follows: +:option:`--exec-prefix` option is not supported under Windows, which means that +pure Python modules and extension modules are installed into the same location. 
+Files are installed as follows: -+------------------------------+---------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+===========================+=============================+ -| pure module distribution | :file:`{prefix}` | :option:`--install-purelib` | -+------------------------------+---------------------------+-----------------------------+ -| non-pure module distribution | :file:`{prefix}` | :option:`--install-platlib` | -+------------------------------+---------------------------+-----------------------------+ -| scripts | :file:`{prefix}\\Scripts` | :option:`--install-scripts` | -+------------------------------+---------------------------+-----------------------------+ -| data | :file:`{prefix}\\Data` | :option:`--install-data` | -+------------------------------+---------------------------+-----------------------------+ +=============== ========================================================== +Type of file Installation directory +=============== ========================================================== +modules :file:`{prefix}\\Lib\\site-packages` +scripts :file:`{prefix}\\Scripts` +data :file:`{prefix}` +C headers :file:`{prefix}\\Include\\{distname}` +=============== ========================================================== .. _inst-custom-install: @@ -460,13 +521,29 @@ or you might want to completely redefine the installation scheme. In either case, you're creating a *custom installation scheme*. -You probably noticed the column of "override options" in the tables describing -the alternate installation schemes above. Those options are how you define a -custom installation scheme. 
These override options can be relative, absolute, +To create a custom installation scheme, you start with one of the alternate +schemes and override some of the installation directories used for the various +types of files, using these options: + +====================== ======================= +Type of file Override option +====================== ======================= +Python modules ``--install-purelib`` +extension modules ``--install-platlib`` +all modules ``--install-lib`` +scripts ``--install-scripts`` +data ``--install-data`` +C headers ``--install-headers`` +====================== ======================= + +These override options can be relative, absolute, or explicitly defined in terms of one of the installation base directories. (There are two installation base directories, and they are normally the same--- they only differ when you use the Unix "prefix scheme" and supply different -:option:`--prefix` and :option:`--exec-prefix` options.) +``--prefix`` and ``--exec-prefix`` options; using ``--install-lib`` will +override values computed or given for ``--install-purelib`` and +``--install-platlib``, and is recommended for schemes that don't make a +difference between Python and extension modules.) For example, say you're installing a module distribution to your home directory under Unix---but you want scripts to go in :file:`~/scripts` rather than @@ -493,15 +570,16 @@ a subdirectory of :file:`{prefix}`, rather than right in :file:`{prefix}` itself. This is almost as easy as customizing the script installation directory ---you just have to remember that there are two types of modules to worry about, -pure modules and non-pure modules (i.e., modules from a non-pure distribution). 
-For example:: +Python and extension modules, which can conveniently be both controlled by one +option:: - python setup.py install --install-purelib=Site --install-platlib=Site + python setup.py install --install-lib=Site -The specified installation directories are relative to :file:`{prefix}`. Of -course, you also have to ensure that these directories are in Python's module -search path, such as by putting a :file:`.pth` file in :file:`{prefix}`. See -section :ref:`inst-search-path` to find out how to modify Python's search path. +The specified installation directory is relative to :file:`{prefix}`. Of +course, you also have to ensure that this directory is in Python's module +search path, such as by putting a :file:`.pth` file in a site directory (see +:mod:`site`). See section :ref:`inst-search-path` to find out how to modify +Python's search path. If you want to define an entire installation scheme, you just have to supply all of the installation directory options. The recommended way to do this is to @@ -553,8 +631,8 @@ python setup.py install --install-base=/tmp -would install pure modules to :file:`{/tmp/python/lib}` in the first case, and -to :file:`{/tmp/lib}` in the second case. (For the second case, you probably +would install pure modules to :file:`/tmp/python/lib` in the first case, and +to :file:`/tmp/lib` in the second case. (For the second case, you probably want to supply an installation base of :file:`/tmp/python`.) You probably noticed the use of ``$HOME`` and ``$PLAT`` in the sample @@ -571,7 +649,7 @@ needed on those platforms? -.. XXX I'm not sure where this section should go. +.. XXX Move this to Doc/using .. 
_inst-search-path: diff --git a/Doc/install/install.rst b/Doc/install/install.rst --- a/Doc/install/install.rst +++ b/Doc/install/install.rst @@ -293,6 +293,14 @@ >>> sys.exec_prefix '/usr' +A few other placeholders are used in this document: :file:`{X.Y}` stands for the +version of Python, for example ``3.2``; :file:`{abiflags}` will be replaced by +the value of :data:`sys.abiflags` or the empty string for platforms which don't +define ABI flags; :file:`{distname}` will be replaced by the name of the module +distribution being installed. Dots and capitalization are important in the +paths; for example, a value that uses ``python3.2`` on UNIX will typically use +``Python32`` on Windows. + If you don't want to install modules to the standard location, or if you don't have permission to write there, then you need to read about alternate installations in section :ref:`packaging-alt-install`. If you want to customize your @@ -320,8 +328,72 @@ differ across platforms, so read whichever of the following sections applies to you. +Note that the various alternate installation schemes are mutually exclusive: you +can pass ``--user``, or ``--home``, or ``--prefix`` and ``--exec-prefix``, or +``--install-base`` and ``--install-platbase``, but you can't mix from these +groups. -.. _packaging-alt-install-prefix: + +.. _packaging-alt-install-user: + +Alternate installation: the user scheme +--------------------------------------- + +This scheme is designed to be the most convenient solution for users that don't +have write permission to the global site-packages directory or don't want to +install into it. It is enabled with a simple option:: + + pysetup run install_dist --user + +Files will be installed into subdirectories of :data:`site.USER_BASE` (written +as :file:`{userbase}` hereafter). This scheme installs pure Python modules and +extension modules in the same location (also known as :data:`site.USER_SITE`). 
+Here are the values for UNIX, including non-framework builds on Mac OS X: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}/lib/python{X.Y}/site-packages` +scripts :file:`{userbase}/bin` +data :file:`{userbase}` +C headers :file:`{userbase}/include/python{X.Y}` +=============== =========================================================== + +Framework builds on Mac OS X use these paths: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}/lib/python/site-packages` +scripts :file:`{userbase}/bin` +data :file:`{userbase}` +C headers :file:`{userbase}/include/python` +=============== =========================================================== + +And here are the values used on Windows: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}\\Python{XY}\\site-packages` +scripts :file:`{userbase}\\Scripts` +data :file:`{userbase}` +C headers :file:`{userbase}\\Python{XY}\\Include` +=============== =========================================================== + +The advantage of using this scheme compared to the other ones described below is +that the user site-packages directory is under normal conditions always included +in :data:`sys.path` (see :mod:`site` for more information), which means that +there is no additional step to perform after running ``pysetup`` to finalize the +installation. 
+ +The :command:`build_ext` command also has a ``--user`` option to add +:file:`{userbase}/include` to the compiler search path for header files and +:file:`{userbase}/lib` to the compiler search path for libraries as well as to +the runtime search path for shared C libraries (rpath). + + +.. _packaging-alt-install-home: Alternate installation: the home scheme --------------------------------------- @@ -343,23 +415,27 @@ pysetup run install_dist --home ~ +To make Python find the distributions installed with this scheme, you may have +to :ref:`modify Python's search path ` or edit +:mod:`sitecustomize` (see :mod:`site`) to call :func:`site.addsitedir` or edit +:data:`sys.path`. + The :option:`--home` option defines the base directory for the installation. Under it, files are installed to the following directories: -+------------------------------+---------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+===========================+=============================+ -| pure module distribution | :file:`{home}/lib/python` | :option:`--install-purelib` | -+------------------------------+---------------------------+-----------------------------+ -| non-pure module distribution | :file:`{home}/lib/python` | :option:`--install-platlib` | -+------------------------------+---------------------------+-----------------------------+ -| scripts | :file:`{home}/bin` | :option:`--install-scripts` | -+------------------------------+---------------------------+-----------------------------+ -| data | :file:`{home}/share` | :option:`--install-data` | -+------------------------------+---------------------------+-----------------------------+ +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{home}/lib/python` +scripts :file:`{home}/bin` 
+data :file:`{home}` +C headers :file:`{home}/include/python` +=============== =========================================================== +(Mentally replace slashes with backslashes if you're on Windows.) -.. _packaging-alt-install-home: + +.. _packaging-alt-install-prefix-unix: Alternate installation: Unix (the prefix scheme) ------------------------------------------------ @@ -368,8 +444,8 @@ run the build command, but install modules into the third-party module directory of a different Python installation (or something that looks like a different Python installation). If this sounds a trifle unusual, it is ---that's why the -"home scheme" comes first. However, there are at least two known cases where the -prefix scheme will be useful. +user and home schemes come before. However, there are at least two known cases +where the prefix scheme will be useful. First, consider that many Linux distributions put Python in :file:`/usr`, rather than the more traditional :file:`/usr/local`. This is entirely appropriate, @@ -396,17 +472,17 @@ executables, etc.) If :option:`--exec-prefix` is not supplied, it defaults to :option:`--prefix`. 
Files are installed as follows: -+------------------------------+-----------------------------------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+=====================================================+=============================+ -| pure module distribution | :file:`{prefix}/lib/python{X.Y}/site-packages` | :option:`--install-purelib` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| non-pure module distribution | :file:`{exec-prefix}/lib/python{X.Y}/site-packages` | :option:`--install-platlib` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| scripts | :file:`{prefix}/bin` | :option:`--install-scripts` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| data | :file:`{prefix}/share` | :option:`--install-data` | -+------------------------------+-----------------------------------------------------+-----------------------------+ +================= ========================================================== +Type of file Installation directory +================= ========================================================== +Python modules :file:`{prefix}/lib/python{X.Y}/site-packages` +extension modules :file:`{exec-prefix}/lib/python{X.Y}/site-packages` +scripts :file:`{prefix}/bin` +data :file:`{prefix}` +C headers :file:`{prefix}/include/python{X.Y}{abiflags}` +================= ========================================================== + +.. XXX misses an entry for platinclude There is no requirement that :option:`--prefix` or :option:`--exec-prefix` actually point to an alternate Python installation; if the directories listed @@ -432,7 +508,7 @@ this is immaterial.) -.. _packaging-alt-install-windows: +.. 
_packaging-alt-install-prefix-windows: Alternate installation: Windows (the prefix scheme) --------------------------------------------------- @@ -447,20 +523,18 @@ to install modules to the :file:`\\Temp\\Python` directory on the current drive. The installation base is defined by the :option:`--prefix` option; the -:option:`--exec-prefix` option is unsupported under Windows. Files are -installed as follows: +:option:`--exec-prefix` option is not supported under Windows, which means that +pure Python modules and extension modules are installed into the same location. +Files are installed as follows: -+------------------------------+---------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+===========================+=============================+ -| pure module distribution | :file:`{prefix}` | :option:`--install-purelib` | -+------------------------------+---------------------------+-----------------------------+ -| non-pure module distribution | :file:`{prefix}` | :option:`--install-platlib` | -+------------------------------+---------------------------+-----------------------------+ -| scripts | :file:`{prefix}\\Scripts` | :option:`--install-scripts` | -+------------------------------+---------------------------+-----------------------------+ -| data | :file:`{prefix}\\Data` | :option:`--install-data` | -+------------------------------+---------------------------+-----------------------------+ +=============== ========================================================== +Type of file Installation directory +=============== ========================================================== +modules :file:`{prefix}\\Lib\\site-packages` +scripts :file:`{prefix}\\Scripts` +data :file:`{prefix}` +C headers :file:`{prefix}\\Include` +=============== ========================================================== .. 
_packaging-custom-install: @@ -474,13 +548,29 @@ directory, or you might want to completely redefine the installation scheme. In either case, you're creating a *custom installation scheme*. -You probably noticed the column of "override options" in the tables describing -the alternate installation schemes above. Those options are how you define a -custom installation scheme. These override options can be relative, absolute, +To create a custom installation scheme, you start with one of the alternate +schemes and override some of the installation directories used for the various +types of files, using these options: + +====================== ======================= +Type of file Override option +====================== ======================= +Python modules ``--install-purelib`` +extension modules ``--install-platlib`` +all modules ``--install-lib`` +scripts ``--install-scripts`` +data ``--install-data`` +C headers ``--install-headers`` +====================== ======================= + +These override options can be relative, absolute, or explicitly defined in terms of one of the installation base directories. (There are two installation base directories, and they are normally the same ---they only differ when you use the Unix "prefix scheme" and supply different -:option:`--prefix` and :option:`--exec-prefix` options.) +``--prefix`` and ``--exec-prefix`` options; using ``--install-lib`` will +override values computed or given for ``--install-purelib`` and +``--install-platlib``, and is recommended for schemes that don't make a +difference between Python and extension modules.) For example, say you're installing a module distribution to your home directory under Unix, but you want scripts to go in :file:`~/scripts` rather than @@ -507,17 +597,18 @@ a subdirectory of :file:`{prefix}`, rather than right in :file:`{prefix}` itself. 
This is almost as easy as customizing the script installation directory ---you just have to remember that there are two types of modules to worry about, -pure modules and non-pure modules (i.e., modules from a non-pure distribution). -For example:: +Python and extension modules, which can conveniently be both controlled by one +option:: - pysetup run install_dist --install-purelib Site --install-platlib Site + pysetup run install_dist --install-lib Site .. XXX Nothing is installed right under prefix in windows, is it?? -The specified installation directories are relative to :file:`{prefix}`. Of -course, you also have to ensure that these directories are in Python's module -search path, such as by putting a :file:`.pth` file in :file:`{prefix}`. See -section :ref:`packaging-search-path` to find out how to modify Python's search path. +The specified installation directory is relative to :file:`{prefix}`. Of +course, you also have to ensure that this directory is in Python's module +search path, such as by putting a :file:`.pth` file in a site directory (see +:mod:`site`). See section :ref:`packaging-search-path` to find out how to modify +Python's search path. If you want to define an entire installation scheme, you just have to supply all of the installation directory options. Using relative paths is recommended here. diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -175,8 +175,9 @@ .. function:: disk_usage(path) - Return disk usage statistics about the given path as a namedtuple including - total, used and free space expressed in bytes. + Return disk usage statistics about the given path as a :term:`named tuple` + with the attributes *total*, *used* and *free*, which are the amount of + total, used and free space, in bytes. .. 
versionadded:: 3.3 diff --git a/Doc/library/site.rst b/Doc/library/site.rst --- a/Doc/library/site.rst +++ b/Doc/library/site.rst @@ -2,22 +2,24 @@ ================================================ .. module:: site - :synopsis: A standard way to reference site-specific modules. + :synopsis: Module responsible for site-specific configuration. **Source code:** :source:`Lib/site.py` -------------- +.. highlightlang:: none + **This module is automatically imported during initialization.** The automatic import can be suppressed using the interpreter's :option:`-S` option. .. index:: triple: module; search; path -Importing this module will append site-specific paths to the module search -path, unless :option:`-S` was used. In that case, this module can be safely -imported with no automatic modifications to the module search path. To -explicitly trigger the usual site-specific additions, call the -:func:`site.main` function. +Importing this module will append site-specific paths to the module search path +and add a few builtins, unless :option:`-S` was used. In that case, this module +can be safely imported with no automatic modifications to the module search path +or additions to the builtins. To explicitly trigger the usual site-specific +additions, call the :func:`site.main` function. .. versionchanged:: 3.3 Importing the module used to trigger paths manipulation even when using @@ -36,11 +38,11 @@ if it refers to an existing directory, and if so, adds it to ``sys.path`` and also inspects the newly added path for configuration files. -A path configuration file is a file whose name has the form :file:`package.pth` +A path configuration file is a file whose name has the form :file:`{name}.pth` and exists in one of the four directories mentioned above; its contents are additional items (one per line) to be added to ``sys.path``. Non-existing items -are never added to ``sys.path``, but no check is made that the item refers to a -directory (rather than a file). 
No item is added to ``sys.path`` more than +are never added to ``sys.path``, and no check is made that the item refers to a +directory rather than a file. No item is added to ``sys.path`` more than once. Blank lines and lines beginning with ``#`` are skipped. Lines starting with ``import`` (followed by space or tab) are executed. @@ -50,8 +52,7 @@ For example, suppose ``sys.prefix`` and ``sys.exec_prefix`` are set to :file:`/usr/local`. The Python X.Y library is then installed in -:file:`/usr/local/lib/python{X.Y}` (where only the first three characters of -``sys.version`` are used to form the installation path name). Suppose this has +:file:`/usr/local/lib/python{X.Y}`. Suppose this has a subdirectory :file:`/usr/local/lib/python{X.Y}/site-packages` with three subsubdirectories, :file:`foo`, :file:`bar` and :file:`spam`, and two path configuration files, :file:`foo.pth` and :file:`bar.pth`. Assume @@ -84,42 +85,59 @@ After these path manipulations, an attempt is made to import a module named :mod:`sitecustomize`, which can perform arbitrary site-specific customizations. -If this import fails with an :exc:`ImportError` exception, it is silently -ignored. +It is typically created by a system administrator in the site-packages +directory. If this import fails with an :exc:`ImportError` exception, it is +silently ignored. -.. index:: module: sitecustomize +.. index:: module: usercustomize + +After this, an attempt is made to import a module named :mod:`usercustomize`, +which can perform arbitrary user-specific customizations, if +:data:`ENABLE_USER_SITE` is true. This file is intended to be created in the +user site-packages directory (see below), which is part of ``sys.path`` unless +disabled by :option:`-s`. An :exc:`ImportError` will be silently ignored. Note that for some non-Unix systems, ``sys.prefix`` and ``sys.exec_prefix`` are empty, and the path manipulations are skipped; however the import of -:mod:`sitecustomize` is still attempted. 
+:mod:`sitecustomize` and :mod:`usercustomize` is still attempted. .. data:: PREFIXES - A list of prefixes for site package directories + A list of prefixes for site-packages directories. .. data:: ENABLE_USER_SITE - Flag showing the status of the user site directory. True means the - user site directory is enabled and added to sys.path. When the flag - is None the user site directory is disabled for security reasons. + Flag showing the status of the user site-packages directory. ``True`` means + that it is enabled and was added to ``sys.path``. ``False`` means that it + was disabled by user request (with :option:`-s` or + :envvar:`PYTHONNOUSERSITE`). ``None`` means it was disabled for security + reasons (mismatch between user or group id and effective id) or by an + administrator. .. data:: USER_SITE - Path to the user site directory for the current Python version or None + Path to the user site-packages for the running Python. Can be ``None`` if + :func:`getusersitepackages` hasn't been called yet. Default value is + :file:`~/.local/lib/python{X.Y}/site-packages` for UNIX and non-framework Mac + OS X builds, :file:`~/Library/Python/{X.Y}/lib/python/site-packages` for Mac + framework builds, and :file:`{%APPDATA%}\\Python\\Python{XY}\\site-packages` + on Windows. This directory is a site directory, which means that + :file:`.pth` files in it will be processed. .. data:: USER_BASE - Path to the base directory for user site directories - - -.. envvar:: PYTHONNOUSERSITE - - -.. envvar:: PYTHONUSERBASE + Path to the base directory for the user site-packages. Can be ``None`` if + :func:`getuserbase` hasn't been called yet. Default value is + :file:`~/.local` for UNIX and Mac OS X non-framework builds, + :file:`~/Library/Python/{X.Y}` for Mac framework builds, and + :file:`{%APPDATA%}\\Python` for Windows. This value is used by Packaging to + compute the installation directories for scripts, data files, Python modules, + etc. for the :ref:`user installation scheme `. 
+ See also :envvar:`PYTHONUSERBASE`. .. function:: main() @@ -135,34 +153,67 @@ .. function:: addsitedir(sitedir, known_paths=None) - Adds a directory to sys.path and processes its pth files. + Add a directory to sys.path and process its :file:`.pth` files. Typically + used in :mod:`sitecustomize` or :mod:`usercustomize` (see above). + .. function:: getsitepackages() - Returns a list containing all global site-packages directories - (and possibly site-python). + Return a list containing all global site-packages directories (and possibly + site-python). .. versionadded:: 3.2 + .. function:: getuserbase() - Returns the "user base" directory path. - - The "user base" directory can be used to store data. If the global - variable ``USER_BASE`` is not initialized yet, this function will also set - it. + Return the path of the user base directory, :data:`USER_BASE`. If it is not + initialized yet, this function will also set it, respecting + :envvar:`PYTHONUSERBASE`. .. versionadded:: 3.2 + .. function:: getusersitepackages() - Returns the user-specific site-packages directory path. - - If the global variable ``USER_SITE`` is not initialized yet, this - function will also set it. + Return the path of the user-specific site-packages directory, + :data:`USER_SITE`. If it is not initialized yet, this function will also set + it, respecting :envvar:`PYTHONNOUSERSITE` and :data:`USER_BASE`. .. versionadded:: 3.2 -.. XXX Update documentation -.. XXX document python -m site --user-base --user-site +The :mod:`site` module also provides a way to get the user directories from the +command line: + +.. code-block:: sh + + $ python3 -m site --user-site + /home/user/.local/lib/python3.3/site-packages + +.. 
program:: site + +If it is called without arguments, it will print the contents of +:data:`sys.path` on the standard output, followed by the value of +:data:`USER_BASE` and whether the directory exists, then the same thing for +:data:`USER_SITE`, and finally the value of :data:`ENABLE_USER_SITE`. + +.. cmdoption:: --user-base + + Print the path to the user base directory. + +.. cmdoption:: --user-site + + Print the path to the user site-packages directory. + +If both options are given, user base and user site will be printed (always in +this order), separated by :data:`os.pathsep`. + +If any option is given, the script will exit with one of these values: ``O`` if +the user site-packages directory is enabled, ``1`` if it was disabled by the +user, ``2`` if it is disabled for security reasons or by an administrator, and a +value greater than 2 if there is an error. + +.. seealso:: + + :pep:`370` -- Per user site-packages directory diff --git a/Doc/packaging/commandref.rst b/Doc/packaging/commandref.rst --- a/Doc/packaging/commandref.rst +++ b/Doc/packaging/commandref.rst @@ -12,6 +12,12 @@ description. Use pysetup run --help to get help about the options of one command. +.. XXX sections from this document should be merged with other docs (e.g. check + and upload with uploading.rst, install_* with install/install.rst, etc.); + there is no value in partially duplicating information. this file could + however serve as an index, i.e. just a list of all commands with links to + every section that describes options or usage + Preparing distributions ======================= @@ -310,13 +316,8 @@ ----------------------- Install a distribution, delegating to the other :command:`install_*` commands to -do the work. - -.. program:: packaging install_dist - -.. cmdoption:: --user - - Install in user site-packages directory (see :PEP:`370`). +do the work. See :ref:`packaging-how-install-works` for complete usage +instructions. 
:command:`install_data` diff --git a/Doc/tutorial/interactive.rst b/Doc/tutorial/interactive.rst --- a/Doc/tutorial/interactive.rst +++ b/Doc/tutorial/interactive.rst @@ -156,17 +156,18 @@ quotes, etc., would also be useful. One alternative enhanced interactive interpreter that has been around for quite -some time is `IPython`_, which features tab completion, object exploration and +some time is IPython_, which features tab completion, object exploration and advanced history management. It can also be thoroughly customized and embedded into other applications. Another similar enhanced interactive environment is -`bpython`_. +bpython_. .. rubric:: Footnotes .. [#] Python will execute the contents of a file identified by the :envvar:`PYTHONSTARTUP` environment variable when you start an interactive - interpreter. + interpreter. To customize Python even for non-interactive mode, see + :ref:`tut-customize`. .. _GNU Readline: http://tiswww.case.edu/php/chet/readline/rltop.html diff --git a/Doc/tutorial/interpreter.rst b/Doc/tutorial/interpreter.rst --- a/Doc/tutorial/interpreter.rst +++ b/Doc/tutorial/interpreter.rst @@ -236,6 +236,29 @@ exec(open(filename).read()) +.. _tut-customize: + +The Customization Modules +------------------------- + +Python provides two hooks to let you customize it: :mod:`sitecustomize` and +:mod:`usercustomize`. To see how it works, you need first to find the location +of your user site-packages directory. Start Python and run this code: + + >>> import site + >>> site.getusersitepackages() + '/home/user/.local/lib/python3.2/site-packages' + +Now you can create a file named :file:`usercustomize.py` in that directory and +put anything you want in it. It will affect every invocation of Python, unless +it is started with the :option:`-s` option to disable the automatic import. 
+ +:mod:`sitecustomize` works in the same way, but is typically created by an +administrator of the computer in the global site-packages directory, and is +imported before :mod:`usercustomize`. See the documentation of the :mod:`site` +module for more details. + + .. rubric:: Footnotes .. [#] On Unix, the Python 3.x interpreter is by default not installed with the @@ -243,4 +266,3 @@ simultaneously installed Python 2.x executable. .. [#] A problem with the GNU Readline package may prevent this. - diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -229,7 +229,8 @@ .. cmdoption:: -s - Don't add user site directory to sys.path + Don't add the :data:`user site-packages directory ` to + :data:`sys.path`. .. seealso:: @@ -470,7 +471,8 @@ .. envvar:: PYTHONNOUSERSITE - If this is set, Python won't add the user site directory to sys.path + If this is set, Python won't add the :data:`user site-packages directory + ` to :data:`sys.path`. .. seealso:: @@ -479,7 +481,10 @@ .. envvar:: PYTHONUSERBASE - Sets the base directory for the user site directory + Defines the :data:`user base directory `, which is used to + compute the path of the :data:`user site-packages directory ` + and :ref:`Packaging installation paths ` for + ``pysetup run install_dist --user``. .. 
seealso:: diff --git a/Lib/shutil.py b/Lib/shutil.py --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -12,7 +12,6 @@ import collections import errno import tarfile -from collections import namedtuple try: import bz2 @@ -36,6 +35,7 @@ "register_archive_format", "unregister_archive_format", "get_unpack_formats", "register_unpack_format", "unregister_unpack_format", "unpack_archive"] + # disk_usage is added later, if available on the platform class Error(EnvironmentError): pass @@ -756,20 +756,36 @@ kwargs = dict(_UNPACK_FORMATS[format][2]) func(filename, extract_dir, **kwargs) -if hasattr(os, "statvfs") or os.name == 'nt': - _ntuple_diskusage = namedtuple('usage', 'total used free') + +if hasattr(os, 'statvfs'): + + __all__.append('disk_usage') + _ntuple_diskusage = collections.namedtuple('usage', 'total used free') def disk_usage(path): - """Return disk usage statistics about the given path as a namedtuple - including total, used and free space expressed in bytes. + """Return disk usage statistics about the given path. + + Returned valus is a named tuple with attributes 'total', 'used' and + 'free', which are the amount of total, used and free space, in bytes. """ - if hasattr(os, "statvfs"): - st = os.statvfs(path) - free = (st.f_bavail * st.f_frsize) - total = (st.f_blocks * st.f_frsize) - used = (st.f_blocks - st.f_bfree) * st.f_frsize - else: - import nt - total, free = nt._getdiskusage(path) - used = total - free + st = os.statvfs(path) + free = st.f_bavail * st.f_frsize + total = st.f_blocks * st.f_frsize + used = (st.f_blocks - st.f_bfree) * st.f_frsize return _ntuple_diskusage(total, used, free) + +elif os.name == 'nt': + + import nt + __all__.append('disk_usage') + _ntuple_diskusage = collections.namedtuple('usage', 'total used free') + + def disk_usage(path): + """Return disk usage statistics about the given path. + + Returned valus is a named tuple with attributes 'total', 'used' and + 'free', which are the amount of total, used and free space, in bytes. 
+ """ + total, free = nt._getdiskusage(path) + used = total - free + return _ntuple_diskusage(total, used, free) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 18:11:18 2011 From: python-checkins at python.org (eric.araujo) Date: Tue, 09 Aug 2011 18:11:18 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAobWVyZ2UgMy4yIC0+IDMuMik6?= =?utf8?q?_Branch_merge?= Message-ID: http://hg.python.org/cpython/rev/66a52da1c9da changeset: 71783:66a52da1c9da branch: 3.2 parent: 71772:41b816853819 parent: 71779:1b0b5f644090 user: ?ric Araujo date: Mon Aug 08 16:56:00 2011 +0200 summary: Branch merge files: Doc/install/index.rst | 184 ++++++++++++++++------ Doc/library/site.rst | 126 +++++++++++---- Doc/tutorial/interactive.rst | 7 +- Doc/tutorial/interpreter.rst | 24 ++- Doc/using/cmdline.rst | 11 +- 5 files changed, 255 insertions(+), 97 deletions(-) diff --git a/Doc/install/index.rst b/Doc/install/index.rst --- a/Doc/install/index.rst +++ b/Doc/install/index.rst @@ -279,6 +279,14 @@ >>> sys.exec_prefix '/usr' +A few other placeholders are used in this document: :file:`{X.Y}` stands for the +version of Python, for example ``3.2``; :file:`{abiflags}` will be replaced by +the value of :data:`sys.abiflags` or the empty string for platforms which don't +define ABI flags; :file:`{distname}` will be replaced by the name of the module +distribution being installed. Dots and capitalization are important in the +paths; for example, a value that uses ``python3.2`` on UNIX will typically use +``Python32`` on Windows. + If you don't want to install modules to the standard location, or if you don't have permission to write there, then you need to read about alternate installations in section :ref:`inst-alt-install`. If you want to customize your @@ -307,8 +315,61 @@ differ across platforms, so read whichever of the following sections applies to you. 
+Note that the various alternate installation schemes are mutually exclusive: you +can pass ``--user``, or ``--home``, or ``--prefix`` and ``--exec-prefix``, or +``--install-base`` and ``--install-platbase``, but you can't mix from these +groups. -.. _inst-alt-install-prefix: + +.. _inst-alt-install-user: + +Alternate installation: the user scheme +--------------------------------------- + +This scheme is designed to be the most convenient solution for users that don't +have write permission to the global site-packages directory or don't want to +install into it. It is enabled with a simple option:: + + python setup.py install --user + +Files will be installed into subdirectories of :data:`site.USER_BASE` (written +as :file:`{userbase}` hereafter). This scheme installs pure Python modules and +extension modules in the same location (also known as :data:`site.USER_SITE`). +Here are the values for UNIX, including Mac OS X: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}/lib/python{X.Y}/site-packages` +scripts :file:`{userbase}/bin` +data :file:`{userbase}` +C headers :file:`{userbase}/include/python{X.Y}{abiflags}/{distname}` +=============== =========================================================== + +And here are the values used on Windows: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}\\Python{XY}\\site-packages` +scripts :file:`{userbase}\\Scripts` +data :file:`{userbase}` +C headers :file:`{userbase}\\Python{XY}\\Include\\{distname}` +=============== =========================================================== + +The advantage of using this scheme compared to the other ones described below is +that the user site-packages 
directory is under normal conditions always included +in :data:`sys.path` (see :mod:`site` for more information), which means that +there is no additional step to perform after running the :file:`setup.py` script +to finalize the installation. + +The :command:`build_ext` command also has a ``--user`` option to add +:file:`{userbase}/include` to the compiler search path for header files and +:file:`{userbase}/lib` to the compiler search path for libraries as well as to +the runtime search path for shared C libraries (rpath). + + +.. _inst-alt-install-home: Alternate installation: the home scheme --------------------------------------- @@ -330,23 +391,27 @@ python setup.py install --home=~ +To make Python find the distributions installed with this scheme, you may have +to :ref:`modify Python's search path ` or edit +:mod:`sitecustomize` (see :mod:`site`) to call :func:`site.addsitedir` or edit +:data:`sys.path`. + The :option:`--home` option defines the installation base directory. Files are installed to the following directories under the installation base as follows: -+------------------------------+---------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+===========================+=============================+ -| pure module distribution | :file:`{home}/lib/python` | :option:`--install-purelib` | -+------------------------------+---------------------------+-----------------------------+ -| non-pure module distribution | :file:`{home}/lib/python` | :option:`--install-platlib` | -+------------------------------+---------------------------+-----------------------------+ -| scripts | :file:`{home}/bin` | :option:`--install-scripts` | -+------------------------------+---------------------------+-----------------------------+ -| data | :file:`{home}/share` | :option:`--install-data` | 
-+------------------------------+---------------------------+-----------------------------+ +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{home}/lib/python` +scripts :file:`{home}/bin` +data :file:`{home}` +C headers :file:`{home}/include/python/{distname}` +=============== =========================================================== +(Mentally replace slashes with backslashes if you're on Windows.) -.. _inst-alt-install-home: + +.. _inst-alt-install-prefix-unix: Alternate installation: Unix (the prefix scheme) ------------------------------------------------ @@ -355,7 +420,7 @@ perform the build/install (i.e., to run the setup script), but install modules into the third-party module directory of a different Python installation (or something that looks like a different Python installation). If this sounds a -trifle unusual, it is---that's why the "home scheme" comes first. However, +trifle unusual, it is---that's why the user and home schemes come before. However, there are at least two known cases where the prefix scheme will be useful. First, consider that many Linux distributions put Python in :file:`/usr`, rather @@ -383,17 +448,15 @@ executables, etc.) If :option:`--exec-prefix` is not supplied, it defaults to :option:`--prefix`. 
Files are installed as follows: -+------------------------------+-----------------------------------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+=====================================================+=============================+ -| pure module distribution | :file:`{prefix}/lib/python{X.Y}/site-packages` | :option:`--install-purelib` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| non-pure module distribution | :file:`{exec-prefix}/lib/python{X.Y}/site-packages` | :option:`--install-platlib` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| scripts | :file:`{prefix}/bin` | :option:`--install-scripts` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| data | :file:`{prefix}/share` | :option:`--install-data` | -+------------------------------+-----------------------------------------------------+-----------------------------+ +================= ========================================================== +Type of file Installation directory +================= ========================================================== +Python modules :file:`{prefix}/lib/python{X.Y}/site-packages` +extension modules :file:`{exec-prefix}/lib/python{X.Y}/site-packages` +scripts :file:`{prefix}/bin` +data :file:`{prefix}` +C headers :file:`{prefix}/include/python{X.Y}{abiflags}/{distname}` +================= ========================================================== There is no requirement that :option:`--prefix` or :option:`--exec-prefix` actually point to an alternate Python installation; if the directories listed @@ -418,7 +481,7 @@ alternate Python installation, this is immaterial.) -.. _inst-alt-install-windows: +.. 
_inst-alt-install-prefix-windows: Alternate installation: Windows (the prefix scheme) --------------------------------------------------- @@ -433,20 +496,18 @@ to install modules to the :file:`\\Temp\\Python` directory on the current drive. The installation base is defined by the :option:`--prefix` option; the -:option:`--exec-prefix` option is not supported under Windows. Files are -installed as follows: +:option:`--exec-prefix` option is not supported under Windows, which means that +pure Python modules and extension modules are installed into the same location. +Files are installed as follows: -+------------------------------+---------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+===========================+=============================+ -| pure module distribution | :file:`{prefix}` | :option:`--install-purelib` | -+------------------------------+---------------------------+-----------------------------+ -| non-pure module distribution | :file:`{prefix}` | :option:`--install-platlib` | -+------------------------------+---------------------------+-----------------------------+ -| scripts | :file:`{prefix}\\Scripts` | :option:`--install-scripts` | -+------------------------------+---------------------------+-----------------------------+ -| data | :file:`{prefix}\\Data` | :option:`--install-data` | -+------------------------------+---------------------------+-----------------------------+ +=============== ========================================================== +Type of file Installation directory +=============== ========================================================== +modules :file:`{prefix}\\Lib\\site-packages` +scripts :file:`{prefix}\\Scripts` +data :file:`{prefix}` +C headers :file:`{prefix}\\Include\\{distname}` +=============== ========================================================== .. 
_inst-custom-install: @@ -460,13 +521,29 @@ or you might want to completely redefine the installation scheme. In either case, you're creating a *custom installation scheme*. -You probably noticed the column of "override options" in the tables describing -the alternate installation schemes above. Those options are how you define a -custom installation scheme. These override options can be relative, absolute, +To create a custom installation scheme, you start with one of the alternate +schemes and override some of the installation directories used for the various +types of files, using these options: + +====================== ======================= +Type of file Override option +====================== ======================= +Python modules ``--install-purelib`` +extension modules ``--install-platlib`` +all modules ``--install-lib`` +scripts ``--install-scripts`` +data ``--install-data`` +C headers ``--install-headers`` +====================== ======================= + +These override options can be relative, absolute, or explicitly defined in terms of one of the installation base directories. (There are two installation base directories, and they are normally the same--- they only differ when you use the Unix "prefix scheme" and supply different -:option:`--prefix` and :option:`--exec-prefix` options.) +``--prefix`` and ``--exec-prefix`` options; using ``--install-lib`` will +override values computed or given for ``--install-purelib`` and +``--install-platlib``, and is recommended for schemes that don't make a +difference between Python and extension modules.) For example, say you're installing a module distribution to your home directory under Unix---but you want scripts to go in :file:`~/scripts` rather than @@ -493,15 +570,16 @@ a subdirectory of :file:`{prefix}`, rather than right in :file:`{prefix}` itself. 
This is almost as easy as customizing the script installation directory ---you just have to remember that there are two types of modules to worry about, -pure modules and non-pure modules (i.e., modules from a non-pure distribution). -For example:: +Python and extension modules, which can conveniently be both controlled by one +option:: - python setup.py install --install-purelib=Site --install-platlib=Site + python setup.py install --install-lib=Site -The specified installation directories are relative to :file:`{prefix}`. Of -course, you also have to ensure that these directories are in Python's module -search path, such as by putting a :file:`.pth` file in :file:`{prefix}`. See -section :ref:`inst-search-path` to find out how to modify Python's search path. +The specified installation directory is relative to :file:`{prefix}`. Of +course, you also have to ensure that this directory is in Python's module +search path, such as by putting a :file:`.pth` file in a site directory (see +:mod:`site`). See section :ref:`inst-search-path` to find out how to modify +Python's search path. If you want to define an entire installation scheme, you just have to supply all of the installation directory options. The recommended way to do this is to @@ -553,8 +631,8 @@ python setup.py install --install-base=/tmp -would install pure modules to :file:`{/tmp/python/lib}` in the first case, and -to :file:`{/tmp/lib}` in the second case. (For the second case, you probably +would install pure modules to :file:`/tmp/python/lib` in the first case, and +to :file:`/tmp/lib` in the second case. (For the second case, you probably want to supply an installation base of :file:`/tmp/python`.) You probably noticed the use of ``$HOME`` and ``$PLAT`` in the sample @@ -571,7 +649,7 @@ needed on those platforms? -.. XXX I'm not sure where this section should go. +.. XXX Move this to Doc/using .. 
_inst-search-path: diff --git a/Doc/library/site.rst b/Doc/library/site.rst --- a/Doc/library/site.rst +++ b/Doc/library/site.rst @@ -2,18 +2,21 @@ ================================================ .. module:: site - :synopsis: A standard way to reference site-specific modules. + :synopsis: Module responsible for site-specific configuration. **Source code:** :source:`Lib/site.py` -------------- +.. highlightlang:: none + **This module is automatically imported during initialization.** The automatic import can be suppressed using the interpreter's :option:`-S` option. .. index:: triple: module; search; path -Importing this module will append site-specific paths to the module search path. +Importing this module will append site-specific paths to the module search path +and add a few builtins. .. index:: pair: site-python; directory @@ -28,11 +31,11 @@ if it refers to an existing directory, and if so, adds it to ``sys.path`` and also inspects the newly added path for configuration files. -A path configuration file is a file whose name has the form :file:`package.pth` +A path configuration file is a file whose name has the form :file:`{name}.pth` and exists in one of the four directories mentioned above; its contents are additional items (one per line) to be added to ``sys.path``. Non-existing items -are never added to ``sys.path``, but no check is made that the item refers to a -directory (rather than a file). No item is added to ``sys.path`` more than +are never added to ``sys.path``, and no check is made that the item refers to a +directory rather than a file. No item is added to ``sys.path`` more than once. Blank lines and lines beginning with ``#`` are skipped. Lines starting with ``import`` (followed by space or tab) are executed. @@ -42,8 +45,7 @@ For example, suppose ``sys.prefix`` and ``sys.exec_prefix`` are set to :file:`/usr/local`. 
The Python X.Y library is then installed in -:file:`/usr/local/lib/python{X.Y}` (where only the first three characters of -``sys.version`` are used to form the installation path name). Suppose this has +:file:`/usr/local/lib/python{X.Y}`. Suppose this has a subdirectory :file:`/usr/local/lib/python{X.Y}/site-packages` with three subsubdirectories, :file:`foo`, :file:`bar` and :file:`spam`, and two path configuration files, :file:`foo.pth` and :file:`bar.pth`. Assume @@ -76,74 +78,124 @@ After these path manipulations, an attempt is made to import a module named :mod:`sitecustomize`, which can perform arbitrary site-specific customizations. -If this import fails with an :exc:`ImportError` exception, it is silently -ignored. +It is typically created by a system administrator in the site-packages +directory. If this import fails with an :exc:`ImportError` exception, it is +silently ignored. -.. index:: module: sitecustomize +.. index:: module: usercustomize + +After this, an attempt is made to import a module named :mod:`usercustomize`, +which can perform arbitrary user-specific customizations, if +:data:`ENABLE_USER_SITE` is true. This file is intended to be created in the +user site-packages directory (see below), which is part of ``sys.path`` unless +disabled by :option:`-s`. An :exc:`ImportError` will be silently ignored. Note that for some non-Unix systems, ``sys.prefix`` and ``sys.exec_prefix`` are empty, and the path manipulations are skipped; however the import of -:mod:`sitecustomize` is still attempted. +:mod:`sitecustomize` and :mod:`usercustomize` is still attempted. .. data:: PREFIXES - A list of prefixes for site package directories + A list of prefixes for site-packages directories. .. data:: ENABLE_USER_SITE - Flag showing the status of the user site directory. True means the - user site directory is enabled and added to sys.path. When the flag - is None the user site directory is disabled for security reasons. 
+ Flag showing the status of the user site-packages directory. ``True`` means + that it is enabled and was added to ``sys.path``. ``False`` means that it + was disabled by user request (with :option:`-s` or + :envvar:`PYTHONNOUSERSITE`). ``None`` means it was disabled for security + reasons (mismatch between user or group id and effective id) or by an + administrator. .. data:: USER_SITE - Path to the user site directory for the current Python version or None + Path to the user site-packages for the running Python. Can be ``None`` if + :func:`getusersitepackages` hasn't been called yet. Default value is + :file:`~/.local/lib/python{X.Y}/site-packages` for UNIX and non-framework Mac + OS X builds, :file:`~/Library/Python/{X.Y}/lib/python/site-packages` for Mac + framework builds, and :file:`{%APPDATA%}\\Python\\Python{XY}\\site-packages` + on Windows. This directory is a site directory, which means that + :file:`.pth` files in it will be processed. .. data:: USER_BASE - Path to the base directory for user site directories - - -.. envvar:: PYTHONNOUSERSITE - - -.. envvar:: PYTHONUSERBASE + Path to the base directory for the user site-packages. Can be ``None`` if + :func:`getuserbase` hasn't been called yet. Default value is + :file:`~/.local` for UNIX and Mac OS X non-framework builds, + :file:`~/Library/Python/{X.Y}` for Mac framework builds, and + :file:`{%APPDATA%}\\Python` for Windows. This value is used by Distutils to + compute the installation directories for scripts, data files, Python modules, + etc. for the :ref:`user installation scheme `. See + also :envvar:`PYTHONUSERBASE`. .. function:: addsitedir(sitedir, known_paths=None) - Adds a directory to sys.path and processes its pth files. + Add a directory to sys.path and process its :file:`.pth` files. Typically + used in :mod:`sitecustomize` or :mod:`usercustomize` (see above). + .. function:: getsitepackages() - Returns a list containing all global site-packages directories - (and possibly site-python). 
+ Return a list containing all global site-packages directories (and possibly + site-python). .. versionadded:: 3.2 + .. function:: getuserbase() - Returns the "user base" directory path. - - The "user base" directory can be used to store data. If the global - variable ``USER_BASE`` is not initialized yet, this function will also set - it. + Return the path of the user base directory, :data:`USER_BASE`. If it is not + initialized yet, this function will also set it, respecting + :envvar:`PYTHONUSERBASE`. .. versionadded:: 3.2 + .. function:: getusersitepackages() - Returns the user-specific site-packages directory path. - - If the global variable ``USER_SITE`` is not initialized yet, this - function will also set it. + Return the path of the user-specific site-packages directory, + :data:`USER_SITE`. If it is not initialized yet, this function will also set + it, respecting :envvar:`PYTHONNOUSERSITE` and :data:`USER_BASE`. .. versionadded:: 3.2 -.. XXX Update documentation -.. XXX document python -m site --user-base --user-site +The :mod:`site` module also provides a way to get the user directories from the +command line: + +.. code-block:: sh + + $ python3 -m site --user-site + /home/user/.local/lib/python3.3/site-packages + +.. program:: site + +If it is called without arguments, it will print the contents of +:data:`sys.path` on the standard output, followed by the value of +:data:`USER_BASE` and whether the directory exists, then the same thing for +:data:`USER_SITE`, and finally the value of :data:`ENABLE_USER_SITE`. + +.. cmdoption:: --user-base + + Print the path to the user base directory. + +.. cmdoption:: --user-site + + Print the path to the user site-packages directory. + +If both options are given, user base and user site will be printed (always in +this order), separated by :data:`os.pathsep`. 
+ +If any option is given, the script will exit with one of these values: ``0`` if +the user site-packages directory is enabled, ``1`` if it was disabled by the +user, ``2`` if it is disabled for security reasons or by an administrator, and a +value greater than 2 if there is an error. + +.. seealso:: + + :pep:`370` -- Per user site-packages directory diff --git a/Doc/tutorial/interactive.rst b/Doc/tutorial/interactive.rst --- a/Doc/tutorial/interactive.rst +++ b/Doc/tutorial/interactive.rst @@ -156,17 +156,18 @@ quotes, etc., would also be useful. One alternative enhanced interactive interpreter that has been around for quite -some time is `IPython`_, which features tab completion, object exploration and +some time is IPython_, which features tab completion, object exploration and advanced history management. It can also be thoroughly customized and embedded into other applications. Another similar enhanced interactive environment is -`bpython`_. +bpython_. .. rubric:: Footnotes .. [#] Python will execute the contents of a file identified by the :envvar:`PYTHONSTARTUP` environment variable when you start an interactive - interpreter. + interpreter. To customize Python even for non-interactive mode, see + :ref:`tut-customize`. .. _GNU Readline: http://tiswww.case.edu/php/chet/readline/rltop.html diff --git a/Doc/tutorial/interpreter.rst b/Doc/tutorial/interpreter.rst --- a/Doc/tutorial/interpreter.rst +++ b/Doc/tutorial/interpreter.rst @@ -236,6 +236,29 @@ exec(open(filename).read()) +.. _tut-customize: + +The Customization Modules +------------------------- + +Python provides two hooks to let you customize it: :mod:`sitecustomize` and +:mod:`usercustomize`. To see how it works, you need first to find the location +of your user site-packages directory. 
Start Python and run this code: + + >>> import site + >>> site.getusersitepackages() + '/home/user/.local/lib/python3.2/site-packages' + +Now you can create a file named :file:`usercustomize.py` in that directory and +put anything you want in it. It will affect every invocation of Python, unless +it is started with the :option:`-s` option to disable the automatic import. + +:mod:`sitecustomize` works in the same way, but is typically created by an +administrator of the computer in the global site-packages directory, and is +imported before :mod:`usercustomize`. See the documentation of the :mod:`site` +module for more details. + + .. rubric:: Footnotes .. [#] On Unix, the Python 3.x interpreter is by default not installed with the @@ -243,4 +266,3 @@ simultaneously installed Python 2.x executable. .. [#] A problem with the GNU Readline package may prevent this. - diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -229,7 +229,8 @@ .. cmdoption:: -s - Don't add user site directory to sys.path + Don't add the :data:`user site-packages directory ` to + :data:`sys.path`. .. seealso:: @@ -468,7 +469,8 @@ .. envvar:: PYTHONNOUSERSITE - If this is set, Python won't add the user site directory to sys.path + If this is set, Python won't add the :data:`user site-packages directory + ` to :data:`sys.path`. .. seealso:: @@ -477,7 +479,10 @@ .. envvar:: PYTHONUSERBASE - Sets the base directory for the user site directory + Defines the :data:`user base directory `, which is used to + compute the path of the :data:`user site-packages directory ` + and :ref:`Distutils installation paths ` for ``python + setup.py install --user``. .. 
seealso:: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 18:11:18 2011 From: python-checkins at python.org (eric.araujo) Date: Tue, 09 Aug 2011 18:11:18 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/fd5ffb9f4b53 changeset: 71784:fd5ffb9f4b53 parent: 71782:619889035734 parent: 71783:66a52da1c9da user: ?ric Araujo date: Mon Aug 08 16:56:46 2011 +0200 summary: Merge 3.2 files: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 18:11:19 2011 From: python-checkins at python.org (eric.araujo) Date: Tue, 09 Aug 2011 18:11:19 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_Branch_merge?= Message-ID: http://hg.python.org/cpython/rev/bc3c01a08e7d changeset: 71785:bc3c01a08e7d parent: 71777:1777df2ce50d parent: 71784:fd5ffb9f4b53 user: ?ric Araujo date: Tue Aug 09 18:01:52 2011 +0200 summary: Branch merge files: Doc/distutils/install.rst | 184 +++++++++++++++------ Doc/install/install.rst | 193 ++++++++++++++++------ Doc/library/shutil.rst | 5 +- Doc/library/site.rst | 133 ++++++++++---- Doc/packaging/commandref.rst | 15 +- Doc/tutorial/interactive.rst | 7 +- Doc/tutorial/interpreter.rst | 24 ++- Doc/using/cmdline.rst | 11 +- Lib/shutil.py | 44 +++- 9 files changed, 441 insertions(+), 175 deletions(-) diff --git a/Doc/distutils/install.rst b/Doc/distutils/install.rst --- a/Doc/distutils/install.rst +++ b/Doc/distutils/install.rst @@ -279,6 +279,14 @@ >>> sys.exec_prefix '/usr' +A few other placeholders are used in this document: :file:`{X.Y}` stands for the +version of Python, for example ``3.2``; :file:`{abiflags}` will be replaced by +the value of :data:`sys.abiflags` or the empty string for platforms which don't +define ABI flags; :file:`{distname}` will be replaced by the name of the module +distribution being installed. 
Dots and capitalization are important in the +paths; for example, a value that uses ``python3.2`` on UNIX will typically use +``Python32`` on Windows. + If you don't want to install modules to the standard location, or if you don't have permission to write there, then you need to read about alternate installations in section :ref:`inst-alt-install`. If you want to customize your @@ -307,8 +315,61 @@ differ across platforms, so read whichever of the following sections applies to you. +Note that the various alternate installation schemes are mutually exclusive: you +can pass ``--user``, or ``--home``, or ``--prefix`` and ``--exec-prefix``, or +``--install-base`` and ``--install-platbase``, but you can't mix from these +groups. -.. _inst-alt-install-prefix: + +.. _inst-alt-install-user: + +Alternate installation: the user scheme +--------------------------------------- + +This scheme is designed to be the most convenient solution for users that don't +have write permission to the global site-packages directory or don't want to +install into it. It is enabled with a simple option:: + + python setup.py install --user + +Files will be installed into subdirectories of :data:`site.USER_BASE` (written +as :file:`{userbase}` hereafter). This scheme installs pure Python modules and +extension modules in the same location (also known as :data:`site.USER_SITE`). 
+Here are the values for UNIX, including Mac OS X: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}/lib/python{X.Y}/site-packages` +scripts :file:`{userbase}/bin` +data :file:`{userbase}` +C headers :file:`{userbase}/include/python{X.Y}{abiflags}/{distname}` +=============== =========================================================== + +And here are the values used on Windows: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}\\Python{XY}\\site-packages` +scripts :file:`{userbase}\\Scripts` +data :file:`{userbase}` +C headers :file:`{userbase}\\Python{XY}\\Include\\{distname}` +=============== =========================================================== + +The advantage of using this scheme compared to the other ones described below is +that the user site-packages directory is under normal conditions always included +in :data:`sys.path` (see :mod:`site` for more information), which means that +there is no additional step to perform after running the :file:`setup.py` script +to finalize the installation. + +The :command:`build_ext` command also has a ``--user`` option to add +:file:`{userbase}/include` to the compiler search path for header files and +:file:`{userbase}/lib` to the compiler search path for libraries as well as to +the runtime search path for shared C libraries (rpath). + + +.. 
_inst-alt-install-home: Alternate installation: the home scheme --------------------------------------- @@ -330,23 +391,27 @@ python setup.py install --home=~ +To make Python find the distributions installed with this scheme, you may have +to :ref:`modify Python's search path ` or edit +:mod:`sitecustomize` (see :mod:`site`) to call :func:`site.addsitedir` or edit +:data:`sys.path`. + The :option:`--home` option defines the installation base directory. Files are installed to the following directories under the installation base as follows: -+------------------------------+---------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+===========================+=============================+ -| pure module distribution | :file:`{home}/lib/python` | :option:`--install-purelib` | -+------------------------------+---------------------------+-----------------------------+ -| non-pure module distribution | :file:`{home}/lib/python` | :option:`--install-platlib` | -+------------------------------+---------------------------+-----------------------------+ -| scripts | :file:`{home}/bin` | :option:`--install-scripts` | -+------------------------------+---------------------------+-----------------------------+ -| data | :file:`{home}/share` | :option:`--install-data` | -+------------------------------+---------------------------+-----------------------------+ +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{home}/lib/python` +scripts :file:`{home}/bin` +data :file:`{home}` +C headers :file:`{home}/include/python/{distname}` +=============== =========================================================== +(Mentally replace slashes with backslashes if you're on Windows.) -.. _inst-alt-install-home: + +.. 
_inst-alt-install-prefix-unix: Alternate installation: Unix (the prefix scheme) ------------------------------------------------ @@ -355,7 +420,7 @@ perform the build/install (i.e., to run the setup script), but install modules into the third-party module directory of a different Python installation (or something that looks like a different Python installation). If this sounds a -trifle unusual, it is---that's why the "home scheme" comes first. However, +trifle unusual, it is---that's why the user and home schemes come before. However, there are at least two known cases where the prefix scheme will be useful. First, consider that many Linux distributions put Python in :file:`/usr`, rather @@ -383,17 +448,15 @@ executables, etc.) If :option:`--exec-prefix` is not supplied, it defaults to :option:`--prefix`. Files are installed as follows: -+------------------------------+-----------------------------------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+=====================================================+=============================+ -| pure module distribution | :file:`{prefix}/lib/python{X.Y}/site-packages` | :option:`--install-purelib` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| non-pure module distribution | :file:`{exec-prefix}/lib/python{X.Y}/site-packages` | :option:`--install-platlib` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| scripts | :file:`{prefix}/bin` | :option:`--install-scripts` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| data | :file:`{prefix}/share` | :option:`--install-data` | -+------------------------------+-----------------------------------------------------+-----------------------------+ +================= 
========================================================== +Type of file Installation directory +================= ========================================================== +Python modules :file:`{prefix}/lib/python{X.Y}/site-packages` +extension modules :file:`{exec-prefix}/lib/python{X.Y}/site-packages` +scripts :file:`{prefix}/bin` +data :file:`{prefix}` +C headers :file:`{prefix}/include/python{X.Y}{abiflags}/{distname}` +================= ========================================================== There is no requirement that :option:`--prefix` or :option:`--exec-prefix` actually point to an alternate Python installation; if the directories listed @@ -418,7 +481,7 @@ alternate Python installation, this is immaterial.) -.. _inst-alt-install-windows: +.. _inst-alt-install-prefix-windows: Alternate installation: Windows (the prefix scheme) --------------------------------------------------- @@ -433,20 +496,18 @@ to install modules to the :file:`\\Temp\\Python` directory on the current drive. The installation base is defined by the :option:`--prefix` option; the -:option:`--exec-prefix` option is not supported under Windows. Files are -installed as follows: +:option:`--exec-prefix` option is not supported under Windows, which means that +pure Python modules and extension modules are installed into the same location. 
+Files are installed as follows: -+------------------------------+---------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+===========================+=============================+ -| pure module distribution | :file:`{prefix}` | :option:`--install-purelib` | -+------------------------------+---------------------------+-----------------------------+ -| non-pure module distribution | :file:`{prefix}` | :option:`--install-platlib` | -+------------------------------+---------------------------+-----------------------------+ -| scripts | :file:`{prefix}\\Scripts` | :option:`--install-scripts` | -+------------------------------+---------------------------+-----------------------------+ -| data | :file:`{prefix}\\Data` | :option:`--install-data` | -+------------------------------+---------------------------+-----------------------------+ +=============== ========================================================== +Type of file Installation directory +=============== ========================================================== +modules :file:`{prefix}\\Lib\\site-packages` +scripts :file:`{prefix}\\Scripts` +data :file:`{prefix}` +C headers :file:`{prefix}\\Include\\{distname}` +=============== ========================================================== .. _inst-custom-install: @@ -460,13 +521,29 @@ or you might want to completely redefine the installation scheme. In either case, you're creating a *custom installation scheme*. -You probably noticed the column of "override options" in the tables describing -the alternate installation schemes above. Those options are how you define a -custom installation scheme. 
These override options can be relative, absolute, +To create a custom installation scheme, you start with one of the alternate +schemes and override some of the installation directories used for the various +types of files, using these options: + +====================== ======================= +Type of file Override option +====================== ======================= +Python modules ``--install-purelib`` +extension modules ``--install-platlib`` +all modules ``--install-lib`` +scripts ``--install-scripts`` +data ``--install-data`` +C headers ``--install-headers`` +====================== ======================= + +These override options can be relative, absolute, or explicitly defined in terms of one of the installation base directories. (There are two installation base directories, and they are normally the same--- they only differ when you use the Unix "prefix scheme" and supply different -:option:`--prefix` and :option:`--exec-prefix` options.) +``--prefix`` and ``--exec-prefix`` options; using ``--install-lib`` will +override values computed or given for ``--install-purelib`` and +``--install-platlib``, and is recommended for schemes that don't make a +difference between Python and extension modules.) For example, say you're installing a module distribution to your home directory under Unix---but you want scripts to go in :file:`~/scripts` rather than @@ -493,15 +570,16 @@ a subdirectory of :file:`{prefix}`, rather than right in :file:`{prefix}` itself. This is almost as easy as customizing the script installation directory ---you just have to remember that there are two types of modules to worry about, -pure modules and non-pure modules (i.e., modules from a non-pure distribution). 
-For example:: +Python and extension modules, which can conveniently be both controlled by one +option:: - python setup.py install --install-purelib=Site --install-platlib=Site + python setup.py install --install-lib=Site -The specified installation directories are relative to :file:`{prefix}`. Of -course, you also have to ensure that these directories are in Python's module -search path, such as by putting a :file:`.pth` file in :file:`{prefix}`. See -section :ref:`inst-search-path` to find out how to modify Python's search path. +The specified installation directory is relative to :file:`{prefix}`. Of +course, you also have to ensure that this directory is in Python's module +search path, such as by putting a :file:`.pth` file in a site directory (see +:mod:`site`). See section :ref:`inst-search-path` to find out how to modify +Python's search path. If you want to define an entire installation scheme, you just have to supply all of the installation directory options. The recommended way to do this is to @@ -553,8 +631,8 @@ python setup.py install --install-base=/tmp -would install pure modules to :file:`{/tmp/python/lib}` in the first case, and -to :file:`{/tmp/lib}` in the second case. (For the second case, you probably +would install pure modules to :file:`/tmp/python/lib` in the first case, and +to :file:`/tmp/lib` in the second case. (For the second case, you probably want to supply an installation base of :file:`/tmp/python`.) You probably noticed the use of ``$HOME`` and ``$PLAT`` in the sample @@ -571,7 +649,7 @@ needed on those platforms? -.. XXX I'm not sure where this section should go. +.. XXX Move this to Doc/using .. 
_inst-search-path: diff --git a/Doc/install/install.rst b/Doc/install/install.rst --- a/Doc/install/install.rst +++ b/Doc/install/install.rst @@ -293,6 +293,14 @@ >>> sys.exec_prefix '/usr' +A few other placeholders are used in this document: :file:`{X.Y}` stands for the +version of Python, for example ``3.2``; :file:`{abiflags}` will be replaced by +the value of :data:`sys.abiflags` or the empty string for platforms which don't +define ABI flags; :file:`{distname}` will be replaced by the name of the module +distribution being installed. Dots and capitalization are important in the +paths; for example, a value that uses ``python3.2`` on UNIX will typically use +``Python32`` on Windows. + If you don't want to install modules to the standard location, or if you don't have permission to write there, then you need to read about alternate installations in section :ref:`packaging-alt-install`. If you want to customize your @@ -320,8 +328,72 @@ differ across platforms, so read whichever of the following sections applies to you. +Note that the various alternate installation schemes are mutually exclusive: you +can pass ``--user``, or ``--home``, or ``--prefix`` and ``--exec-prefix``, or +``--install-base`` and ``--install-platbase``, but you can't mix from these +groups. -.. _packaging-alt-install-prefix: + +.. _packaging-alt-install-user: + +Alternate installation: the user scheme +--------------------------------------- + +This scheme is designed to be the most convenient solution for users that don't +have write permission to the global site-packages directory or don't want to +install into it. It is enabled with a simple option:: + + pysetup run install_dist --user + +Files will be installed into subdirectories of :data:`site.USER_BASE` (written +as :file:`{userbase}` hereafter). This scheme installs pure Python modules and +extension modules in the same location (also known as :data:`site.USER_SITE`). 
+Here are the values for UNIX, including non-framework builds on Mac OS X: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}/lib/python{X.Y}/site-packages` +scripts :file:`{userbase}/bin` +data :file:`{userbase}` +C headers :file:`{userbase}/include/python{X.Y}` +=============== =========================================================== + +Framework builds on Mac OS X use these paths: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}/lib/python/site-packages` +scripts :file:`{userbase}/bin` +data :file:`{userbase}` +C headers :file:`{userbase}/include/python` +=============== =========================================================== + +And here are the values used on Windows: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}\\Python{XY}\\site-packages` +scripts :file:`{userbase}\\Scripts` +data :file:`{userbase}` +C headers :file:`{userbase}\\Python{XY}\\Include` +=============== =========================================================== + +The advantage of using this scheme compared to the other ones described below is +that the user site-packages directory is under normal conditions always included +in :data:`sys.path` (see :mod:`site` for more information), which means that +there is no additional step to perform after running ``pysetup`` to finalize the +installation. 
+ +The :command:`build_ext` command also has a ``--user`` option to add +:file:`{userbase}/include` to the compiler search path for header files and +:file:`{userbase}/lib` to the compiler search path for libraries as well as to +the runtime search path for shared C libraries (rpath). + + +.. _packaging-alt-install-home: Alternate installation: the home scheme --------------------------------------- @@ -343,23 +415,27 @@ pysetup run install_dist --home ~ +To make Python find the distributions installed with this scheme, you may have +to :ref:`modify Python's search path ` or edit +:mod:`sitecustomize` (see :mod:`site`) to call :func:`site.addsitedir` or edit +:data:`sys.path`. + The :option:`--home` option defines the base directory for the installation. Under it, files are installed to the following directories: -+------------------------------+---------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+===========================+=============================+ -| pure module distribution | :file:`{home}/lib/python` | :option:`--install-purelib` | -+------------------------------+---------------------------+-----------------------------+ -| non-pure module distribution | :file:`{home}/lib/python` | :option:`--install-platlib` | -+------------------------------+---------------------------+-----------------------------+ -| scripts | :file:`{home}/bin` | :option:`--install-scripts` | -+------------------------------+---------------------------+-----------------------------+ -| data | :file:`{home}/share` | :option:`--install-data` | -+------------------------------+---------------------------+-----------------------------+ +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{home}/lib/python` +scripts :file:`{home}/bin` 
+data :file:`{home}` +C headers :file:`{home}/include/python` +=============== =========================================================== +(Mentally replace slashes with backslashes if you're on Windows.) -.. _packaging-alt-install-home: + +.. _packaging-alt-install-prefix-unix: Alternate installation: Unix (the prefix scheme) ------------------------------------------------ @@ -368,8 +444,8 @@ run the build command, but install modules into the third-party module directory of a different Python installation (or something that looks like a different Python installation). If this sounds a trifle unusual, it is ---that's why the -"home scheme" comes first. However, there are at least two known cases where the -prefix scheme will be useful. +user and home schemes come before. However, there are at least two known cases +where the prefix scheme will be useful. First, consider that many Linux distributions put Python in :file:`/usr`, rather than the more traditional :file:`/usr/local`. This is entirely appropriate, @@ -396,17 +472,17 @@ executables, etc.) If :option:`--exec-prefix` is not supplied, it defaults to :option:`--prefix`. 
Files are installed as follows: -+------------------------------+-----------------------------------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+=====================================================+=============================+ -| pure module distribution | :file:`{prefix}/lib/python{X.Y}/site-packages` | :option:`--install-purelib` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| non-pure module distribution | :file:`{exec-prefix}/lib/python{X.Y}/site-packages` | :option:`--install-platlib` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| scripts | :file:`{prefix}/bin` | :option:`--install-scripts` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| data | :file:`{prefix}/share` | :option:`--install-data` | -+------------------------------+-----------------------------------------------------+-----------------------------+ +================= ========================================================== +Type of file Installation directory +================= ========================================================== +Python modules :file:`{prefix}/lib/python{X.Y}/site-packages` +extension modules :file:`{exec-prefix}/lib/python{X.Y}/site-packages` +scripts :file:`{prefix}/bin` +data :file:`{prefix}` +C headers :file:`{prefix}/include/python{X.Y}{abiflags}` +================= ========================================================== + +.. XXX misses an entry for platinclude There is no requirement that :option:`--prefix` or :option:`--exec-prefix` actually point to an alternate Python installation; if the directories listed @@ -432,7 +508,7 @@ this is immaterial.) -.. _packaging-alt-install-windows: +.. 
_packaging-alt-install-prefix-windows: Alternate installation: Windows (the prefix scheme) --------------------------------------------------- @@ -447,20 +523,18 @@ to install modules to the :file:`\\Temp\\Python` directory on the current drive. The installation base is defined by the :option:`--prefix` option; the -:option:`--exec-prefix` option is unsupported under Windows. Files are -installed as follows: +:option:`--exec-prefix` option is not supported under Windows, which means that +pure Python modules and extension modules are installed into the same location. +Files are installed as follows: -+------------------------------+---------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+===========================+=============================+ -| pure module distribution | :file:`{prefix}` | :option:`--install-purelib` | -+------------------------------+---------------------------+-----------------------------+ -| non-pure module distribution | :file:`{prefix}` | :option:`--install-platlib` | -+------------------------------+---------------------------+-----------------------------+ -| scripts | :file:`{prefix}\\Scripts` | :option:`--install-scripts` | -+------------------------------+---------------------------+-----------------------------+ -| data | :file:`{prefix}\\Data` | :option:`--install-data` | -+------------------------------+---------------------------+-----------------------------+ +=============== ========================================================== +Type of file Installation directory +=============== ========================================================== +modules :file:`{prefix}\\Lib\\site-packages` +scripts :file:`{prefix}\\Scripts` +data :file:`{prefix}` +C headers :file:`{prefix}\\Include` +=============== ========================================================== .. 
_packaging-custom-install: @@ -474,13 +548,29 @@ directory, or you might want to completely redefine the installation scheme. In either case, you're creating a *custom installation scheme*. -You probably noticed the column of "override options" in the tables describing -the alternate installation schemes above. Those options are how you define a -custom installation scheme. These override options can be relative, absolute, +To create a custom installation scheme, you start with one of the alternate +schemes and override some of the installation directories used for the various +types of files, using these options: + +====================== ======================= +Type of file Override option +====================== ======================= +Python modules ``--install-purelib`` +extension modules ``--install-platlib`` +all modules ``--install-lib`` +scripts ``--install-scripts`` +data ``--install-data`` +C headers ``--install-headers`` +====================== ======================= + +These override options can be relative, absolute, or explicitly defined in terms of one of the installation base directories. (There are two installation base directories, and they are normally the same ---they only differ when you use the Unix "prefix scheme" and supply different -:option:`--prefix` and :option:`--exec-prefix` options.) +``--prefix`` and ``--exec-prefix`` options; using ``--install-lib`` will +override values computed or given for ``--install-purelib`` and +``--install-platlib``, and is recommended for schemes that don't make a +difference between Python and extension modules.) For example, say you're installing a module distribution to your home directory under Unix, but you want scripts to go in :file:`~/scripts` rather than @@ -507,17 +597,18 @@ a subdirectory of :file:`{prefix}`, rather than right in :file:`{prefix}` itself. 
This is almost as easy as customizing the script installation directory ---you just have to remember that there are two types of modules to worry about, -pure modules and non-pure modules (i.e., modules from a non-pure distribution). -For example:: +Python and extension modules, which can conveniently be both controlled by one +option:: - pysetup run install_dist --install-purelib Site --install-platlib Site + pysetup run install_dist --install-lib Site .. XXX Nothing is installed right under prefix in windows, is it?? -The specified installation directories are relative to :file:`{prefix}`. Of -course, you also have to ensure that these directories are in Python's module -search path, such as by putting a :file:`.pth` file in :file:`{prefix}`. See -section :ref:`packaging-search-path` to find out how to modify Python's search path. +The specified installation directory is relative to :file:`{prefix}`. Of +course, you also have to ensure that this directory is in Python's module +search path, such as by putting a :file:`.pth` file in a site directory (see +:mod:`site`). See section :ref:`packaging-search-path` to find out how to modify +Python's search path. If you want to define an entire installation scheme, you just have to supply all of the installation directory options. Using relative paths is recommended here. diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -175,8 +175,9 @@ .. function:: disk_usage(path) - Return disk usage statistics about the given path as a namedtuple including - total, used and free space expressed in bytes. + Return disk usage statistics about the given path as a :term:`named tuple` + with the attributes *total*, *used* and *free*, which are the amount of + total, used and free space, in bytes. .. 
versionadded:: 3.3 diff --git a/Doc/library/site.rst b/Doc/library/site.rst --- a/Doc/library/site.rst +++ b/Doc/library/site.rst @@ -2,22 +2,24 @@ ================================================ .. module:: site - :synopsis: A standard way to reference site-specific modules. + :synopsis: Module responsible for site-specific configuration. **Source code:** :source:`Lib/site.py` -------------- +.. highlightlang:: none + **This module is automatically imported during initialization.** The automatic import can be suppressed using the interpreter's :option:`-S` option. .. index:: triple: module; search; path -Importing this module will append site-specific paths to the module search -path, unless :option:`-S` was used. In that case, this module can be safely -imported with no automatic modifications to the module search path. To -explicitly trigger the usual site-specific additions, call the -:func:`site.main` function. +Importing this module will append site-specific paths to the module search path +and add a few builtins, unless :option:`-S` was used. In that case, this module +can be safely imported with no automatic modifications to the module search path +or additions to the builtins. To explicitly trigger the usual site-specific +additions, call the :func:`site.main` function. .. versionchanged:: 3.3 Importing the module used to trigger paths manipulation even when using @@ -36,11 +38,11 @@ if it refers to an existing directory, and if so, adds it to ``sys.path`` and also inspects the newly added path for configuration files. -A path configuration file is a file whose name has the form :file:`package.pth` +A path configuration file is a file whose name has the form :file:`{name}.pth` and exists in one of the four directories mentioned above; its contents are additional items (one per line) to be added to ``sys.path``. Non-existing items -are never added to ``sys.path``, but no check is made that the item refers to a -directory (rather than a file). 
No item is added to ``sys.path`` more than +are never added to ``sys.path``, and no check is made that the item refers to a +directory rather than a file. No item is added to ``sys.path`` more than once. Blank lines and lines beginning with ``#`` are skipped. Lines starting with ``import`` (followed by space or tab) are executed. @@ -50,8 +52,7 @@ For example, suppose ``sys.prefix`` and ``sys.exec_prefix`` are set to :file:`/usr/local`. The Python X.Y library is then installed in -:file:`/usr/local/lib/python{X.Y}` (where only the first three characters of -``sys.version`` are used to form the installation path name). Suppose this has +:file:`/usr/local/lib/python{X.Y}`. Suppose this has a subdirectory :file:`/usr/local/lib/python{X.Y}/site-packages` with three subsubdirectories, :file:`foo`, :file:`bar` and :file:`spam`, and two path configuration files, :file:`foo.pth` and :file:`bar.pth`. Assume @@ -84,42 +85,59 @@ After these path manipulations, an attempt is made to import a module named :mod:`sitecustomize`, which can perform arbitrary site-specific customizations. -If this import fails with an :exc:`ImportError` exception, it is silently -ignored. +It is typically created by a system administrator in the site-packages +directory. If this import fails with an :exc:`ImportError` exception, it is +silently ignored. -.. index:: module: sitecustomize +.. index:: module: usercustomize + +After this, an attempt is made to import a module named :mod:`usercustomize`, +which can perform arbitrary user-specific customizations, if +:data:`ENABLE_USER_SITE` is true. This file is intended to be created in the +user site-packages directory (see below), which is part of ``sys.path`` unless +disabled by :option:`-s`. An :exc:`ImportError` will be silently ignored. Note that for some non-Unix systems, ``sys.prefix`` and ``sys.exec_prefix`` are empty, and the path manipulations are skipped; however the import of -:mod:`sitecustomize` is still attempted. 
+:mod:`sitecustomize` and :mod:`usercustomize` is still attempted. .. data:: PREFIXES - A list of prefixes for site package directories + A list of prefixes for site-packages directories. .. data:: ENABLE_USER_SITE - Flag showing the status of the user site directory. True means the - user site directory is enabled and added to sys.path. When the flag - is None the user site directory is disabled for security reasons. + Flag showing the status of the user site-packages directory. ``True`` means + that it is enabled and was added to ``sys.path``. ``False`` means that it + was disabled by user request (with :option:`-s` or + :envvar:`PYTHONNOUSERSITE`). ``None`` means it was disabled for security + reasons (mismatch between user or group id and effective id) or by an + administrator. .. data:: USER_SITE - Path to the user site directory for the current Python version or None + Path to the user site-packages for the running Python. Can be ``None`` if + :func:`getusersitepackages` hasn't been called yet. Default value is + :file:`~/.local/lib/python{X.Y}/site-packages` for UNIX and non-framework Mac + OS X builds, :file:`~/Library/Python/{X.Y}/lib/python/site-packages` for Mac + framework builds, and :file:`{%APPDATA%}\\Python\\Python{XY}\\site-packages` + on Windows. This directory is a site directory, which means that + :file:`.pth` files in it will be processed. .. data:: USER_BASE - Path to the base directory for user site directories - - -.. envvar:: PYTHONNOUSERSITE - - -.. envvar:: PYTHONUSERBASE + Path to the base directory for the user site-packages. Can be ``None`` if + :func:`getuserbase` hasn't been called yet. Default value is + :file:`~/.local` for UNIX and Mac OS X non-framework builds, + :file:`~/Library/Python/{X.Y}` for Mac framework builds, and + :file:`{%APPDATA%}\\Python` for Windows. This value is used by Packaging to + compute the installation directories for scripts, data files, Python modules, + etc. for the :ref:`user installation scheme `. 
+ See also :envvar:`PYTHONUSERBASE`. .. function:: main() @@ -135,34 +153,67 @@ .. function:: addsitedir(sitedir, known_paths=None) - Adds a directory to sys.path and processes its pth files. + Add a directory to sys.path and process its :file:`.pth` files. Typically + used in :mod:`sitecustomize` or :mod:`usercustomize` (see above). + .. function:: getsitepackages() - Returns a list containing all global site-packages directories - (and possibly site-python). + Return a list containing all global site-packages directories (and possibly + site-python). .. versionadded:: 3.2 + .. function:: getuserbase() - Returns the "user base" directory path. - - The "user base" directory can be used to store data. If the global - variable ``USER_BASE`` is not initialized yet, this function will also set - it. + Return the path of the user base directory, :data:`USER_BASE`. If it is not + initialized yet, this function will also set it, respecting + :envvar:`PYTHONUSERBASE`. .. versionadded:: 3.2 + .. function:: getusersitepackages() - Returns the user-specific site-packages directory path. - - If the global variable ``USER_SITE`` is not initialized yet, this - function will also set it. + Return the path of the user-specific site-packages directory, + :data:`USER_SITE`. If it is not initialized yet, this function will also set + it, respecting :envvar:`PYTHONNOUSERSITE` and :data:`USER_BASE`. .. versionadded:: 3.2 -.. XXX Update documentation -.. XXX document python -m site --user-base --user-site +The :mod:`site` module also provides a way to get the user directories from the +command line: + +.. code-block:: sh + + $ python3 -m site --user-site + /home/user/.local/lib/python3.3/site-packages + +.. 
program:: site + +If it is called without arguments, it will print the contents of +:data:`sys.path` on the standard output, followed by the value of +:data:`USER_BASE` and whether the directory exists, then the same thing for +:data:`USER_SITE`, and finally the value of :data:`ENABLE_USER_SITE`. + +.. cmdoption:: --user-base + + Print the path to the user base directory. + +.. cmdoption:: --user-site + + Print the path to the user site-packages directory. + +If both options are given, user base and user site will be printed (always in +this order), separated by :data:`os.pathsep`. + +If any option is given, the script will exit with one of these values: ``0`` if +the user site-packages directory is enabled, ``1`` if it was disabled by the +user, ``2`` if it is disabled for security reasons or by an administrator, and a +value greater than 2 if there is an error. + +.. seealso:: + + :pep:`370` -- Per user site-packages directory diff --git a/Doc/packaging/commandref.rst b/Doc/packaging/commandref.rst --- a/Doc/packaging/commandref.rst +++ b/Doc/packaging/commandref.rst @@ -12,6 +12,12 @@ description. Use pysetup run --help to get help about the options of one command. +.. XXX sections from this document should be merged with other docs (e.g. check + and upload with uploading.rst, install_* with install/install.rst, etc.); + there is no value in partially duplicating information. this file could + however serve as an index, i.e. just a list of all commands with links to + every section that describes options or usage + Preparing distributions ======================= @@ -310,13 +316,8 @@ ----------------------- Install a distribution, delegating to the other :command:`install_*` commands to -do the work. - -.. program:: packaging install_dist - -.. cmdoption:: --user - - Install in user site-packages directory (see :PEP:`370`). +do the work. See :ref:`packaging-how-install-works` for complete usage +instructions.
:command:`install_data` diff --git a/Doc/tutorial/interactive.rst b/Doc/tutorial/interactive.rst --- a/Doc/tutorial/interactive.rst +++ b/Doc/tutorial/interactive.rst @@ -156,17 +156,18 @@ quotes, etc., would also be useful. One alternative enhanced interactive interpreter that has been around for quite -some time is `IPython`_, which features tab completion, object exploration and +some time is IPython_, which features tab completion, object exploration and advanced history management. It can also be thoroughly customized and embedded into other applications. Another similar enhanced interactive environment is -`bpython`_. +bpython_. .. rubric:: Footnotes .. [#] Python will execute the contents of a file identified by the :envvar:`PYTHONSTARTUP` environment variable when you start an interactive - interpreter. + interpreter. To customize Python even for non-interactive mode, see + :ref:`tut-customize`. .. _GNU Readline: http://tiswww.case.edu/php/chet/readline/rltop.html diff --git a/Doc/tutorial/interpreter.rst b/Doc/tutorial/interpreter.rst --- a/Doc/tutorial/interpreter.rst +++ b/Doc/tutorial/interpreter.rst @@ -236,6 +236,29 @@ exec(open(filename).read()) +.. _tut-customize: + +The Customization Modules +------------------------- + +Python provides two hooks to let you customize it: :mod:`sitecustomize` and +:mod:`usercustomize`. To see how it works, you need first to find the location +of your user site-packages directory. Start Python and run this code: + + >>> import site + >>> site.getusersitepackages() + '/home/user/.local/lib/python3.2/site-packages' + +Now you can create a file named :file:`usercustomize.py` in that directory and +put anything you want in it. It will affect every invocation of Python, unless +it is started with the :option:`-s` option to disable the automatic import. 
+ +:mod:`sitecustomize` works in the same way, but is typically created by an +administrator of the computer in the global site-packages directory, and is +imported before :mod:`usercustomize`. See the documentation of the :mod:`site` +module for more details. + + .. rubric:: Footnotes .. [#] On Unix, the Python 3.x interpreter is by default not installed with the @@ -243,4 +266,3 @@ simultaneously installed Python 2.x executable. .. [#] A problem with the GNU Readline package may prevent this. - diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -229,7 +229,8 @@ .. cmdoption:: -s - Don't add user site directory to sys.path + Don't add the :data:`user site-packages directory ` to + :data:`sys.path`. .. seealso:: @@ -470,7 +471,8 @@ .. envvar:: PYTHONNOUSERSITE - If this is set, Python won't add the user site directory to sys.path + If this is set, Python won't add the :data:`user site-packages directory + ` to :data:`sys.path`. .. seealso:: @@ -479,7 +481,10 @@ .. envvar:: PYTHONUSERBASE - Sets the base directory for the user site directory + Defines the :data:`user base directory `, which is used to + compute the path of the :data:`user site-packages directory ` + and :ref:`Packaging installation paths ` for + ``pysetup run install_dist --user``. .. 
seealso:: diff --git a/Lib/shutil.py b/Lib/shutil.py --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -12,7 +12,6 @@ import collections import errno import tarfile -from collections import namedtuple try: import bz2 @@ -36,6 +35,7 @@ "register_archive_format", "unregister_archive_format", "get_unpack_formats", "register_unpack_format", "unregister_unpack_format", "unpack_archive"] + # disk_usage is added later, if available on the platform class Error(EnvironmentError): pass @@ -756,20 +756,36 @@ kwargs = dict(_UNPACK_FORMATS[format][2]) func(filename, extract_dir, **kwargs) -if hasattr(os, "statvfs") or os.name == 'nt': - _ntuple_diskusage = namedtuple('usage', 'total used free') + +if hasattr(os, 'statvfs'): + + __all__.append('disk_usage') + _ntuple_diskusage = collections.namedtuple('usage', 'total used free') def disk_usage(path): - """Return disk usage statistics about the given path as a namedtuple - including total, used and free space expressed in bytes. + """Return disk usage statistics about the given path. + + Returned value is a named tuple with attributes 'total', 'used' and + 'free', which are the amount of total, used and free space, in bytes. """ - if hasattr(os, "statvfs"): - st = os.statvfs(path) - free = (st.f_bavail * st.f_frsize) - total = (st.f_blocks * st.f_frsize) - used = (st.f_blocks - st.f_bfree) * st.f_frsize - else: - import nt - total, free = nt._getdiskusage(path) - used = total - free + st = os.statvfs(path) + free = st.f_bavail * st.f_frsize + total = st.f_blocks * st.f_frsize + used = (st.f_blocks - st.f_bfree) * st.f_frsize return _ntuple_diskusage(total, used, free) + +elif os.name == 'nt': + + import nt + __all__.append('disk_usage') + _ntuple_diskusage = collections.namedtuple('usage', 'total used free') + + def disk_usage(path): + """Return disk usage statistics about the given path. + + Returned value is a named tuple with attributes 'total', 'used' and + 'free', which are the amount of total, used and free space, in bytes. 
+ """ + total, free = nt._getdiskusage(path) + used = total - free + return _ntuple_diskusage(total, used, free) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 18:11:20 2011 From: python-checkins at python.org (eric.araujo) Date: Tue, 09 Aug 2011 18:11:20 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAobWVyZ2UgMy4yIC0+IDMuMik6?= =?utf8?q?_Branch_merge?= Message-ID: http://hg.python.org/cpython/rev/7ea5d9c858f1 changeset: 71786:7ea5d9c858f1 branch: 3.2 parent: 71774:0e5433cfe404 parent: 71783:66a52da1c9da user: Éric Araujo date: Tue Aug 09 18:01:38 2011 +0200 summary: Branch merge files: Doc/install/index.rst | 184 ++++++++++++++++------ Doc/library/site.rst | 126 +++++++++++---- Doc/tutorial/interactive.rst | 7 +- Doc/tutorial/interpreter.rst | 24 ++- Doc/using/cmdline.rst | 11 +- 5 files changed, 255 insertions(+), 97 deletions(-) diff --git a/Doc/install/index.rst b/Doc/install/index.rst --- a/Doc/install/index.rst +++ b/Doc/install/index.rst @@ -279,6 +279,14 @@ >>> sys.exec_prefix '/usr' +A few other placeholders are used in this document: :file:`{X.Y}` stands for the +version of Python, for example ``3.2``; :file:`{abiflags}` will be replaced by +the value of :data:`sys.abiflags` or the empty string for platforms which don't +define ABI flags; :file:`{distname}` will be replaced by the name of the module +distribution being installed. Dots and capitalization are important in the +paths; for example, a value that uses ``python3.2`` on UNIX will typically use +``Python32`` on Windows. + If you don't want to install modules to the standard location, or if you don't have permission to write there, then you need to read about alternate installations in section :ref:`inst-alt-install`. If you want to customize your @@ -307,8 +315,61 @@ differ across platforms, so read whichever of the following sections applies to you. 
+Note that the various alternate installation schemes are mutually exclusive: you +can pass ``--user``, or ``--home``, or ``--prefix`` and ``--exec-prefix``, or +``--install-base`` and ``--install-platbase``, but you can't mix from these +groups. -.. _inst-alt-install-prefix: + +.. _inst-alt-install-user: + +Alternate installation: the user scheme +--------------------------------------- + +This scheme is designed to be the most convenient solution for users that don't +have write permission to the global site-packages directory or don't want to +install into it. It is enabled with a simple option:: + + python setup.py install --user + +Files will be installed into subdirectories of :data:`site.USER_BASE` (written +as :file:`{userbase}` hereafter). This scheme installs pure Python modules and +extension modules in the same location (also known as :data:`site.USER_SITE`). +Here are the values for UNIX, including Mac OS X: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}/lib/python{X.Y}/site-packages` +scripts :file:`{userbase}/bin` +data :file:`{userbase}` +C headers :file:`{userbase}/include/python{X.Y}{abiflags}/{distname}` +=============== =========================================================== + +And here are the values used on Windows: + +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{userbase}\\Python{XY}\\site-packages` +scripts :file:`{userbase}\\Scripts` +data :file:`{userbase}` +C headers :file:`{userbase}\\Python{XY}\\Include\\{distname}` +=============== =========================================================== + +The advantage of using this scheme compared to the other ones described below is +that the user site-packages 
directory is under normal conditions always included +in :data:`sys.path` (see :mod:`site` for more information), which means that +there is no additional step to perform after running the :file:`setup.py` script +to finalize the installation. + +The :command:`build_ext` command also has a ``--user`` option to add +:file:`{userbase}/include` to the compiler search path for header files and +:file:`{userbase}/lib` to the compiler search path for libraries as well as to +the runtime search path for shared C libraries (rpath). + + +.. _inst-alt-install-home: Alternate installation: the home scheme --------------------------------------- @@ -330,23 +391,27 @@ python setup.py install --home=~ +To make Python find the distributions installed with this scheme, you may have +to :ref:`modify Python's search path ` or edit +:mod:`sitecustomize` (see :mod:`site`) to call :func:`site.addsitedir` or edit +:data:`sys.path`. + The :option:`--home` option defines the installation base directory. Files are installed to the following directories under the installation base as follows: -+------------------------------+---------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+===========================+=============================+ -| pure module distribution | :file:`{home}/lib/python` | :option:`--install-purelib` | -+------------------------------+---------------------------+-----------------------------+ -| non-pure module distribution | :file:`{home}/lib/python` | :option:`--install-platlib` | -+------------------------------+---------------------------+-----------------------------+ -| scripts | :file:`{home}/bin` | :option:`--install-scripts` | -+------------------------------+---------------------------+-----------------------------+ -| data | :file:`{home}/share` | :option:`--install-data` | 
-+------------------------------+---------------------------+-----------------------------+ +=============== =========================================================== +Type of file Installation directory +=============== =========================================================== +modules :file:`{home}/lib/python` +scripts :file:`{home}/bin` +data :file:`{home}` +C headers :file:`{home}/include/python/{distname}` +=============== =========================================================== +(Mentally replace slashes with backslashes if you're on Windows.) -.. _inst-alt-install-home: + +.. _inst-alt-install-prefix-unix: Alternate installation: Unix (the prefix scheme) ------------------------------------------------ @@ -355,7 +420,7 @@ perform the build/install (i.e., to run the setup script), but install modules into the third-party module directory of a different Python installation (or something that looks like a different Python installation). If this sounds a -trifle unusual, it is---that's why the "home scheme" comes first. However, +trifle unusual, it is---that's why the user and home schemes come before. However, there are at least two known cases where the prefix scheme will be useful. First, consider that many Linux distributions put Python in :file:`/usr`, rather @@ -383,17 +448,15 @@ executables, etc.) If :option:`--exec-prefix` is not supplied, it defaults to :option:`--prefix`. 
Files are installed as follows: -+------------------------------+-----------------------------------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+=====================================================+=============================+ -| pure module distribution | :file:`{prefix}/lib/python{X.Y}/site-packages` | :option:`--install-purelib` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| non-pure module distribution | :file:`{exec-prefix}/lib/python{X.Y}/site-packages` | :option:`--install-platlib` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| scripts | :file:`{prefix}/bin` | :option:`--install-scripts` | -+------------------------------+-----------------------------------------------------+-----------------------------+ -| data | :file:`{prefix}/share` | :option:`--install-data` | -+------------------------------+-----------------------------------------------------+-----------------------------+ +================= ========================================================== +Type of file Installation directory +================= ========================================================== +Python modules :file:`{prefix}/lib/python{X.Y}/site-packages` +extension modules :file:`{exec-prefix}/lib/python{X.Y}/site-packages` +scripts :file:`{prefix}/bin` +data :file:`{prefix}` +C headers :file:`{prefix}/include/python{X.Y}{abiflags}/{distname}` +================= ========================================================== There is no requirement that :option:`--prefix` or :option:`--exec-prefix` actually point to an alternate Python installation; if the directories listed @@ -418,7 +481,7 @@ alternate Python installation, this is immaterial.) -.. _inst-alt-install-windows: +.. 
_inst-alt-install-prefix-windows: Alternate installation: Windows (the prefix scheme) --------------------------------------------------- @@ -433,20 +496,18 @@ to install modules to the :file:`\\Temp\\Python` directory on the current drive. The installation base is defined by the :option:`--prefix` option; the -:option:`--exec-prefix` option is not supported under Windows. Files are -installed as follows: +:option:`--exec-prefix` option is not supported under Windows, which means that +pure Python modules and extension modules are installed into the same location. +Files are installed as follows: -+------------------------------+---------------------------+-----------------------------+ -| Type of file | Installation Directory | Override option | -+==============================+===========================+=============================+ -| pure module distribution | :file:`{prefix}` | :option:`--install-purelib` | -+------------------------------+---------------------------+-----------------------------+ -| non-pure module distribution | :file:`{prefix}` | :option:`--install-platlib` | -+------------------------------+---------------------------+-----------------------------+ -| scripts | :file:`{prefix}\\Scripts` | :option:`--install-scripts` | -+------------------------------+---------------------------+-----------------------------+ -| data | :file:`{prefix}\\Data` | :option:`--install-data` | -+------------------------------+---------------------------+-----------------------------+ +=============== ========================================================== +Type of file Installation directory +=============== ========================================================== +modules :file:`{prefix}\\Lib\\site-packages` +scripts :file:`{prefix}\\Scripts` +data :file:`{prefix}` +C headers :file:`{prefix}\\Include\\{distname}` +=============== ========================================================== .. 
_inst-custom-install: @@ -460,13 +521,29 @@ or you might want to completely redefine the installation scheme. In either case, you're creating a *custom installation scheme*. -You probably noticed the column of "override options" in the tables describing -the alternate installation schemes above. Those options are how you define a -custom installation scheme. These override options can be relative, absolute, +To create a custom installation scheme, you start with one of the alternate +schemes and override some of the installation directories used for the various +types of files, using these options: + +====================== ======================= +Type of file Override option +====================== ======================= +Python modules ``--install-purelib`` +extension modules ``--install-platlib`` +all modules ``--install-lib`` +scripts ``--install-scripts`` +data ``--install-data`` +C headers ``--install-headers`` +====================== ======================= + +These override options can be relative, absolute, or explicitly defined in terms of one of the installation base directories. (There are two installation base directories, and they are normally the same--- they only differ when you use the Unix "prefix scheme" and supply different -:option:`--prefix` and :option:`--exec-prefix` options.) +``--prefix`` and ``--exec-prefix`` options; using ``--install-lib`` will +override values computed or given for ``--install-purelib`` and +``--install-platlib``, and is recommended for schemes that don't make a +difference between Python and extension modules.) For example, say you're installing a module distribution to your home directory under Unix---but you want scripts to go in :file:`~/scripts` rather than @@ -493,15 +570,16 @@ a subdirectory of :file:`{prefix}`, rather than right in :file:`{prefix}` itself. 
This is almost as easy as customizing the script installation directory ---you just have to remember that there are two types of modules to worry about, -pure modules and non-pure modules (i.e., modules from a non-pure distribution). -For example:: +Python and extension modules, which can conveniently be both controlled by one +option:: - python setup.py install --install-purelib=Site --install-platlib=Site + python setup.py install --install-lib=Site -The specified installation directories are relative to :file:`{prefix}`. Of -course, you also have to ensure that these directories are in Python's module -search path, such as by putting a :file:`.pth` file in :file:`{prefix}`. See -section :ref:`inst-search-path` to find out how to modify Python's search path. +The specified installation directory is relative to :file:`{prefix}`. Of +course, you also have to ensure that this directory is in Python's module +search path, such as by putting a :file:`.pth` file in a site directory (see +:mod:`site`). See section :ref:`inst-search-path` to find out how to modify +Python's search path. If you want to define an entire installation scheme, you just have to supply all of the installation directory options. The recommended way to do this is to @@ -553,8 +631,8 @@ python setup.py install --install-base=/tmp -would install pure modules to :file:`{/tmp/python/lib}` in the first case, and -to :file:`{/tmp/lib}` in the second case. (For the second case, you probably +would install pure modules to :file:`/tmp/python/lib` in the first case, and +to :file:`/tmp/lib` in the second case. (For the second case, you probably want to supply an installation base of :file:`/tmp/python`.) You probably noticed the use of ``$HOME`` and ``$PLAT`` in the sample @@ -571,7 +649,7 @@ needed on those platforms? -.. XXX I'm not sure where this section should go. +.. XXX Move this to Doc/using .. 
_inst-search-path: diff --git a/Doc/library/site.rst b/Doc/library/site.rst --- a/Doc/library/site.rst +++ b/Doc/library/site.rst @@ -2,18 +2,21 @@ ================================================ .. module:: site - :synopsis: A standard way to reference site-specific modules. + :synopsis: Module responsible for site-specific configuration. **Source code:** :source:`Lib/site.py` -------------- +.. highlightlang:: none + **This module is automatically imported during initialization.** The automatic import can be suppressed using the interpreter's :option:`-S` option. .. index:: triple: module; search; path -Importing this module will append site-specific paths to the module search path. +Importing this module will append site-specific paths to the module search path +and add a few builtins. .. index:: pair: site-python; directory @@ -28,11 +31,11 @@ if it refers to an existing directory, and if so, adds it to ``sys.path`` and also inspects the newly added path for configuration files. -A path configuration file is a file whose name has the form :file:`package.pth` +A path configuration file is a file whose name has the form :file:`{name}.pth` and exists in one of the four directories mentioned above; its contents are additional items (one per line) to be added to ``sys.path``. Non-existing items -are never added to ``sys.path``, but no check is made that the item refers to a -directory (rather than a file). No item is added to ``sys.path`` more than +are never added to ``sys.path``, and no check is made that the item refers to a +directory rather than a file. No item is added to ``sys.path`` more than once. Blank lines and lines beginning with ``#`` are skipped. Lines starting with ``import`` (followed by space or tab) are executed. @@ -42,8 +45,7 @@ For example, suppose ``sys.prefix`` and ``sys.exec_prefix`` are set to :file:`/usr/local`. 
The Python X.Y library is then installed in -:file:`/usr/local/lib/python{X.Y}` (where only the first three characters of -``sys.version`` are used to form the installation path name). Suppose this has +:file:`/usr/local/lib/python{X.Y}`. Suppose this has a subdirectory :file:`/usr/local/lib/python{X.Y}/site-packages` with three subsubdirectories, :file:`foo`, :file:`bar` and :file:`spam`, and two path configuration files, :file:`foo.pth` and :file:`bar.pth`. Assume @@ -76,74 +78,124 @@ After these path manipulations, an attempt is made to import a module named :mod:`sitecustomize`, which can perform arbitrary site-specific customizations. -If this import fails with an :exc:`ImportError` exception, it is silently -ignored. +It is typically created by a system administrator in the site-packages +directory. If this import fails with an :exc:`ImportError` exception, it is +silently ignored. -.. index:: module: sitecustomize +.. index:: module: usercustomize + +After this, an attempt is made to import a module named :mod:`usercustomize`, +which can perform arbitrary user-specific customizations, if +:data:`ENABLE_USER_SITE` is true. This file is intended to be created in the +user site-packages directory (see below), which is part of ``sys.path`` unless +disabled by :option:`-s`. An :exc:`ImportError` will be silently ignored. Note that for some non-Unix systems, ``sys.prefix`` and ``sys.exec_prefix`` are empty, and the path manipulations are skipped; however the import of -:mod:`sitecustomize` is still attempted. +:mod:`sitecustomize` and :mod:`usercustomize` is still attempted. .. data:: PREFIXES - A list of prefixes for site package directories + A list of prefixes for site-packages directories. .. data:: ENABLE_USER_SITE - Flag showing the status of the user site directory. True means the - user site directory is enabled and added to sys.path. When the flag - is None the user site directory is disabled for security reasons. 
+ Flag showing the status of the user site-packages directory. ``True`` means + that it is enabled and was added to ``sys.path``. ``False`` means that it + was disabled by user request (with :option:`-s` or + :envvar:`PYTHONNOUSERSITE`). ``None`` means it was disabled for security + reasons (mismatch between user or group id and effective id) or by an + administrator. .. data:: USER_SITE - Path to the user site directory for the current Python version or None + Path to the user site-packages for the running Python. Can be ``None`` if + :func:`getusersitepackages` hasn't been called yet. Default value is + :file:`~/.local/lib/python{X.Y}/site-packages` for UNIX and non-framework Mac + OS X builds, :file:`~/Library/Python/{X.Y}/lib/python/site-packages` for Mac + framework builds, and :file:`{%APPDATA%}\\Python\\Python{XY}\\site-packages` + on Windows. This directory is a site directory, which means that + :file:`.pth` files in it will be processed. .. data:: USER_BASE - Path to the base directory for user site directories - - -.. envvar:: PYTHONNOUSERSITE - - -.. envvar:: PYTHONUSERBASE + Path to the base directory for the user site-packages. Can be ``None`` if + :func:`getuserbase` hasn't been called yet. Default value is + :file:`~/.local` for UNIX and Mac OS X non-framework builds, + :file:`~/Library/Python/{X.Y}` for Mac framework builds, and + :file:`{%APPDATA%}\\Python` for Windows. This value is used by Distutils to + compute the installation directories for scripts, data files, Python modules, + etc. for the :ref:`user installation scheme `. See + also :envvar:`PYTHONUSERBASE`. .. function:: addsitedir(sitedir, known_paths=None) - Adds a directory to sys.path and processes its pth files. + Add a directory to sys.path and process its :file:`.pth` files. Typically + used in :mod:`sitecustomize` or :mod:`usercustomize` (see above). + .. function:: getsitepackages() - Returns a list containing all global site-packages directories - (and possibly site-python). 
+ Return a list containing all global site-packages directories (and possibly + site-python). .. versionadded:: 3.2 + .. function:: getuserbase() - Returns the "user base" directory path. - - The "user base" directory can be used to store data. If the global - variable ``USER_BASE`` is not initialized yet, this function will also set - it. + Return the path of the user base directory, :data:`USER_BASE`. If it is not + initialized yet, this function will also set it, respecting + :envvar:`PYTHONUSERBASE`. .. versionadded:: 3.2 + .. function:: getusersitepackages() - Returns the user-specific site-packages directory path. - - If the global variable ``USER_SITE`` is not initialized yet, this - function will also set it. + Return the path of the user-specific site-packages directory, + :data:`USER_SITE`. If it is not initialized yet, this function will also set + it, respecting :envvar:`PYTHONNOUSERSITE` and :data:`USER_BASE`. .. versionadded:: 3.2 -.. XXX Update documentation -.. XXX document python -m site --user-base --user-site +The :mod:`site` module also provides a way to get the user directories from the +command line: + +.. code-block:: sh + + $ python3 -m site --user-site + /home/user/.local/lib/python3.3/site-packages + +.. program:: site + +If it is called without arguments, it will print the contents of +:data:`sys.path` on the standard output, followed by the value of +:data:`USER_BASE` and whether the directory exists, then the same thing for +:data:`USER_SITE`, and finally the value of :data:`ENABLE_USER_SITE`. + +.. cmdoption:: --user-base + + Print the path to the user base directory. + +.. cmdoption:: --user-site + + Print the path to the user site-packages directory. + +If both options are given, user base and user site will be printed (always in +this order), separated by :data:`os.pathsep`. 
+ +If any option is given, the script will exit with one of these values: ``O`` if +the user site-packages directory is enabled, ``1`` if it was disabled by the +user, ``2`` if it is disabled for security reasons or by an administrator, and a +value greater than 2 if there is an error. + +.. seealso:: + + :pep:`370` -- Per user site-packages directory diff --git a/Doc/tutorial/interactive.rst b/Doc/tutorial/interactive.rst --- a/Doc/tutorial/interactive.rst +++ b/Doc/tutorial/interactive.rst @@ -156,17 +156,18 @@ quotes, etc., would also be useful. One alternative enhanced interactive interpreter that has been around for quite -some time is `IPython`_, which features tab completion, object exploration and +some time is IPython_, which features tab completion, object exploration and advanced history management. It can also be thoroughly customized and embedded into other applications. Another similar enhanced interactive environment is -`bpython`_. +bpython_. .. rubric:: Footnotes .. [#] Python will execute the contents of a file identified by the :envvar:`PYTHONSTARTUP` environment variable when you start an interactive - interpreter. + interpreter. To customize Python even for non-interactive mode, see + :ref:`tut-customize`. .. _GNU Readline: http://tiswww.case.edu/php/chet/readline/rltop.html diff --git a/Doc/tutorial/interpreter.rst b/Doc/tutorial/interpreter.rst --- a/Doc/tutorial/interpreter.rst +++ b/Doc/tutorial/interpreter.rst @@ -236,6 +236,29 @@ exec(open(filename).read()) +.. _tut-customize: + +The Customization Modules +------------------------- + +Python provides two hooks to let you customize it: :mod:`sitecustomize` and +:mod:`usercustomize`. To see how it works, you need first to find the location +of your user site-packages directory. 
Start Python and run this code: + + >>> import site + >>> site.getusersitepackages() + '/home/user/.local/lib/python3.2/site-packages' + +Now you can create a file named :file:`usercustomize.py` in that directory and +put anything you want in it. It will affect every invocation of Python, unless +it is started with the :option:`-s` option to disable the automatic import. + +:mod:`sitecustomize` works in the same way, but is typically created by an +administrator of the computer in the global site-packages directory, and is +imported before :mod:`usercustomize`. See the documentation of the :mod:`site` +module for more details. + + .. rubric:: Footnotes .. [#] On Unix, the Python 3.x interpreter is by default not installed with the @@ -243,4 +266,3 @@ simultaneously installed Python 2.x executable. .. [#] A problem with the GNU Readline package may prevent this. - diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -229,7 +229,8 @@ .. cmdoption:: -s - Don't add user site directory to sys.path + Don't add the :data:`user site-packages directory ` to + :data:`sys.path`. .. seealso:: @@ -468,7 +469,8 @@ .. envvar:: PYTHONNOUSERSITE - If this is set, Python won't add the user site directory to sys.path + If this is set, Python won't add the :data:`user site-packages directory + ` to :data:`sys.path`. .. seealso:: @@ -477,7 +479,10 @@ .. envvar:: PYTHONUSERBASE - Sets the base directory for the user site directory + Defines the :data:`user base directory `, which is used to + compute the path of the :data:`user site-packages directory ` + and :ref:`Distutils installation paths ` for ``python + setup.py install --user``. .. 
seealso:: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 18:11:20 2011 From: python-checkins at python.org (eric.araujo) Date: Tue, 09 Aug 2011 18:11:20 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/69b354a8c90f changeset: 71787:69b354a8c90f parent: 71785:bc3c01a08e7d parent: 71786:7ea5d9c858f1 user: ?ric Araujo date: Tue Aug 09 18:09:21 2011 +0200 summary: Merge 3.2 files: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 19:47:07 2011 From: python-checkins at python.org (georg.brandl) Date: Tue, 09 Aug 2011 19:47:07 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Repair_markup?= =?utf8?q?=2E?= Message-ID: http://hg.python.org/cpython/rev/4dbbca3ef2a5 changeset: 71788:4dbbca3ef2a5 branch: 2.7 parent: 71776:10f20ad2fbb6 user: Georg Brandl date: Tue Aug 09 19:47:04 2011 +0200 summary: Repair markup. files: Doc/c-api/init.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -638,7 +638,7 @@ .. versionadded:: 2.3 -.. c:function:: PyThreadState PyGILState_GetThisThreadState() +.. cfunction:: PyThreadState PyGILState_GetThisThreadState() Get the current thread state for this thread. May return ``NULL`` if no GILState API has been used on the current thread. 
Note that the main thread -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 21:07:23 2011 From: python-checkins at python.org (raymond.hettinger) Date: Tue, 09 Aug 2011 21:07:23 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Issue_12717=3A_?= =?utf8?q?Fix-up_an_earlier_backport_in_ConfigParser=2E?= Message-ID: http://hg.python.org/cpython/rev/7d5a37ce42d5 changeset: 71789:7d5a37ce42d5 branch: 2.7 user: Raymond Hettinger date: Tue Aug 09 12:07:15 2011 -0700 summary: Issue 12717: Fix-up an earlier backport in ConfigParser. files: Lib/ConfigParser.py | 2 +- Lib/test/test_cfgparser.py | 22 ++++++++++++++++++++++ 2 files changed, 23 insertions(+), 1 deletions(-) diff --git a/Lib/ConfigParser.py b/Lib/ConfigParser.py --- a/Lib/ConfigParser.py +++ b/Lib/ConfigParser.py @@ -570,7 +570,7 @@ def keys(self): result = [] seen = set() - for mapping in self_maps: + for mapping in self._maps: for key in mapping: if key not in seen: result.append(key) diff --git a/Lib/test/test_cfgparser.py b/Lib/test/test_cfgparser.py --- a/Lib/test/test_cfgparser.py +++ b/Lib/test/test_cfgparser.py @@ -529,6 +529,27 @@ class SafeConfigParserTestCaseNoValue(SafeConfigParserTestCase): allow_no_value = True +class TestChainMap(unittest.TestCase): + def test_issue_12717(self): + d1 = dict(red=1, green=2) + d2 = dict(green=3, blue=4) + dcomb = d2.copy() + dcomb.update(d1) + cm = ConfigParser._Chainmap(d1, d2) + self.assertIsInstance(cm.keys(), list) + self.assertEqual(set(cm.keys()), set(dcomb.keys())) # keys() + self.assertEqual(set(cm.values()), set(dcomb.values())) # values() + self.assertEqual(set(cm.items()), set(dcomb.items())) # items() + self.assertEqual(set(cm), set(dcomb)) # __iter__ () + self.assertEqual(cm, dcomb) # __eq__() + self.assertEqual([cm[k] for k in dcomb], dcomb.values()) # __getitem__() + klist = 'red green blue black brown'.split() + self.assertEqual([cm.get(k, 10) for k in klist], + [dcomb.get(k, 10) for k in 
klist]) # get() + self.assertEqual([k in cm for k in klist], + [k in dcomb for k in klist]) # __contains__() + self.assertEqual([cm.has_key(k) for k in klist], + [dcomb.has_key(k) for k in klist]) # has_key() class Issue7005TestCase(unittest.TestCase): """Test output when None is set() as a value and allow_no_value == False. @@ -591,6 +612,7 @@ SafeConfigParserTestCaseNoValue, SortedTestCase, Issue7005TestCase, + TestChainMap, ) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 22:00:50 2011 From: python-checkins at python.org (raymond.hettinger) Date: Tue, 09 Aug 2011 22:00:50 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_support_for_unary_plus_?= =?utf8?q?and_unary_minus_to_collections=2ECounter=28=29?= Message-ID: http://hg.python.org/cpython/rev/a5e48ce2f5a3 changeset: 71790:a5e48ce2f5a3 parent: 71787:69b354a8c90f user: Raymond Hettinger date: Tue Aug 09 13:00:40 2011 -0700 summary: Add support for unary plus and unary minus to collections.Counter() files: Doc/library/collections.rst | 14 +++++++++++++- Lib/collections/__init__.py | 11 +++++++++++ Lib/test/test_collections.py | 5 +++++ Misc/NEWS | 2 ++ 4 files changed, 31 insertions(+), 1 deletions(-) diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst --- a/Doc/library/collections.rst +++ b/Doc/library/collections.rst @@ -264,7 +264,7 @@ c.items() # convert to a list of (elem, cnt) pairs Counter(dict(list_of_pairs)) # convert from a list of (elem, cnt) pairs c.most_common()[:-n:-1] # n least common elements - c += Counter() # remove zero and negative counts + +c # remove zero and negative counts Several mathematical operations are provided for combining :class:`Counter` objects to produce multisets (counters that have counts greater than zero). @@ -284,6 +284,18 @@ >>> c | d # union: max(c[x], d[x]) Counter({'a': 3, 'b': 2}) +Unary addition and substraction are shortcuts for adding an empty counter +or subtracting from an empty counter. 
+ + >>> c = Counter(a=2, b=-4) + >>> +c + Counter({'a': 2}) + >>> -c + Counter({'b': 4}) + +.. versionadded:: 3.3 + Added support for unary plus and unary minus. + .. note:: Counters were primarily designed to work with positive integers to represent diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py --- a/Lib/collections/__init__.py +++ b/Lib/collections/__init__.py @@ -672,6 +672,17 @@ result[elem] = newcount return result + def __pos__(self): + 'Adds an empty counter, effectively stripping negative and zero counts' + return self + Counter() + + def __neg__(self): + '''Subtracts from an empty counter. Strips positive and zero counts, + and flips the sign on negative counts. + + ''' + return Counter() - self + ######################################################################## ### ChainMap (helper for configparser and string.Template) diff --git a/Lib/test/test_collections.py b/Lib/test/test_collections.py --- a/Lib/test/test_collections.py +++ b/Lib/test/test_collections.py @@ -943,6 +943,11 @@ c.subtract('aaaabbcce') self.assertEqual(c, Counter(a=-1, b=0, c=-1, d=1, e=-1)) + def test_unary(self): + c = Counter(a=-5, b=0, c=5, d=10, e=15,g=40) + self.assertEqual(dict(+c), dict(c=5, d=10, e=15, g=40)) + self.assertEqual(dict(-c), dict(a=5)) + def test_helper_function(self): # two paths, one for real dicts and one for other mappings elems = list('abracadabra') diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -252,6 +252,8 @@ - Issue #12540: Prevent zombie IDLE processes on Windows due to changes in os.kill(). +- Add support for unary plus and unary minus to collections.Counter(). + - Issue #12683: urlparse updated to include svn as schemes that uses relative paths. (svn from 1.5 onwards support relative path). 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 23:17:23 2011 From: python-checkins at python.org (benjamin.peterson) Date: Tue, 09 Aug 2011 23:17:23 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_note_mutating_t?= =?utf8?q?p=5Fdict_is_bad_=28closes_=2312719=29?= Message-ID: http://hg.python.org/cpython/rev/6ef65516fd7a changeset: 71791:6ef65516fd7a branch: 3.2 parent: 71786:7ea5d9c858f1 user: Benjamin Peterson date: Tue Aug 09 16:07:01 2011 -0500 summary: note mutating tp_dict is bad (closes #12719) files: Doc/c-api/typeobj.rst | 5 +++++ 1 files changed, 5 insertions(+), 0 deletions(-) diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -745,6 +745,11 @@ This field is not inherited by subtypes (though the attributes defined in here are inherited through a different mechanism). + .. warning:: + + It is not safe to use :c:func:`PyDict_SetItem` on or otherwise modify + :attr:`tp_dict` with the dictionary C-API. + .. 
c:member:: descrgetfunc PyTypeObject.tp_descr_get -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 23:17:24 2011 From: python-checkins at python.org (benjamin.peterson) Date: Tue, 09 Aug 2011 23:17:24 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_add_a_asdl_bytes_type=2C_so?= =?utf8?q?_Bytes=2Es_be_properly_typechecked?= Message-ID: http://hg.python.org/cpython/rev/f578ca44193d changeset: 71792:f578ca44193d parent: 71787:69b354a8c90f user: Benjamin Peterson date: Tue Aug 09 16:08:39 2011 -0500 summary: add a asdl bytes type, so Bytes.s be properly typechecked files: Include/asdl.h | 1 + Parser/Python.asdl | 3 ++- Parser/asdl.py | 2 +- Parser/asdl_c.py | 10 ++++++++++ Python/Python-ast.c | 18 ++++++++++++++---- 5 files changed, 28 insertions(+), 6 deletions(-) diff --git a/Include/asdl.h b/Include/asdl.h --- a/Include/asdl.h +++ b/Include/asdl.h @@ -3,6 +3,7 @@ typedef PyObject * identifier; typedef PyObject * string; +typedef PyObject * bytes; typedef PyObject * object; /* It would be nice if the code generated by asdl_c.py was completely diff --git a/Parser/Python.asdl b/Parser/Python.asdl --- a/Parser/Python.asdl +++ b/Parser/Python.asdl @@ -1,4 +1,4 @@ --- ASDL's four builtin types are identifier, int, string, object +-- ASDL's five builtin types are identifier, int, string, bytes, object module Python { @@ -67,7 +67,7 @@ expr? starargs, expr? kwargs) | Num(object n) -- a number as a PyObject. | Str(string s) -- need to specify raw, unicode, etc? - | Bytes(string s) + | Bytes(bytes s) | Ellipsis -- other literals? bools? diff --git a/Parser/asdl.py b/Parser/asdl.py --- a/Parser/asdl.py +++ b/Parser/asdl.py @@ -228,7 +228,7 @@ " field ::= Id ? 
" return Field(type[0], opt=True) -builtin_types = ("identifier", "string", "int", "bool", "object") +builtin_types = ("identifier", "string", "bytes", "int", "bool", "object") # below is a collection of classes to capture the AST of an AST :-) # not sure if any of the methods are useful yet, but I'm adding them diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -776,6 +776,7 @@ } #define ast2obj_identifier ast2obj_object #define ast2obj_string ast2obj_object +#define ast2obj_bytes ast2obj_object static PyObject* ast2obj_int(long b) { @@ -813,6 +814,15 @@ return obj2ast_object(obj, out, arena); } +static int obj2ast_bytes(PyObject* obj, PyObject** out, PyArena* arena) +{ + if (!PyBytes_CheckExact(obj)) { + PyErr_SetString(PyExc_TypeError, "AST bytes must be of type bytes"); + return 1; + } + return obj2ast_object(obj, out, arena); +} + static int obj2ast_int(PyObject* obj, int* out, PyArena* arena) { int i; diff --git a/Python/Python-ast.c b/Python/Python-ast.c --- a/Python/Python-ast.c +++ b/Python/Python-ast.c @@ -573,6 +573,7 @@ } #define ast2obj_identifier ast2obj_object #define ast2obj_string ast2obj_object +#define ast2obj_bytes ast2obj_object static PyObject* ast2obj_int(long b) { @@ -610,6 +611,15 @@ return obj2ast_object(obj, out, arena); } +static int obj2ast_bytes(PyObject* obj, PyObject** out, PyArena* arena) +{ + if (!PyBytes_CheckExact(obj)) { + PyErr_SetString(PyExc_TypeError, "AST bytes must be of type bytes"); + return 1; + } + return obj2ast_object(obj, out, arena); +} + static int obj2ast_int(PyObject* obj, int* out, PyArena* arena) { int i; @@ -1773,7 +1783,7 @@ } expr_ty -Bytes(string s, int lineno, int col_offset, PyArena *arena) +Bytes(bytes s, int lineno, int col_offset, PyArena *arena) { expr_ty p; if (!s) { @@ -2804,7 +2814,7 @@ case Bytes_kind: result = PyType_GenericNew(Bytes_type, NULL, NULL); if (!result) goto failed; - value = ast2obj_string(o->v.Bytes.s); + value = 
ast2obj_bytes(o->v.Bytes.s); if (!value) goto failed; if (PyObject_SetAttrString(result, "s", value) == -1) goto failed; @@ -5509,13 +5519,13 @@ return 1; } if (isinstance) { - string s; + bytes s; if (PyObject_HasAttrString(obj, "s")) { int res; tmp = PyObject_GetAttrString(obj, "s"); if (tmp == NULL) goto failed; - res = obj2ast_string(tmp, &s, arena); + res = obj2ast_bytes(tmp, &s, arena); if (res != 0) goto failed; Py_XDECREF(tmp); tmp = NULL; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 23:17:25 2011 From: python-checkins at python.org (benjamin.peterson) Date: Tue, 09 Aug 2011 23:17:25 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_the_bool_asdl_type_died_a_w?= =?utf8?q?hile_ago?= Message-ID: http://hg.python.org/cpython/rev/ec9485e2acde changeset: 71793:ec9485e2acde user: Benjamin Peterson date: Tue Aug 09 16:10:09 2011 -0500 summary: the bool asdl type died a while ago files: Parser/asdl.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Parser/asdl.py b/Parser/asdl.py --- a/Parser/asdl.py +++ b/Parser/asdl.py @@ -228,7 +228,7 @@ " field ::= Id ? 
" return Field(type[0], opt=True) -builtin_types = ("identifier", "string", "bytes", "int", "bool", "object") +builtin_types = ("identifier", "string", "bytes", "int", "object") # below is a collection of classes to capture the AST of an AST :-) # not sure if any of the methods are useful yet, but I'm adding them -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 23:17:25 2011 From: python-checkins at python.org (benjamin.peterson) Date: Tue, 09 Aug 2011 23:17:25 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_forgotten_in_f578ca44193d?= Message-ID: http://hg.python.org/cpython/rev/dd69f0e5381d changeset: 71794:dd69f0e5381d user: Benjamin Peterson date: Tue Aug 09 16:14:45 2011 -0500 summary: forgotten in f578ca44193d files: Include/Python-ast.h | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Include/Python-ast.h b/Include/Python-ast.h --- a/Include/Python-ast.h +++ b/Include/Python-ast.h @@ -271,7 +271,7 @@ } Str; struct { - string s; + bytes s; } Bytes; struct { @@ -501,7 +501,7 @@ #define Str(a0, a1, a2, a3) _Py_Str(a0, a1, a2, a3) expr_ty _Py_Str(string s, int lineno, int col_offset, PyArena *arena); #define Bytes(a0, a1, a2, a3) _Py_Bytes(a0, a1, a2, a3) -expr_ty _Py_Bytes(string s, int lineno, int col_offset, PyArena *arena); +expr_ty _Py_Bytes(bytes s, int lineno, int col_offset, PyArena *arena); #define Ellipsis(a0, a1, a2) _Py_Ellipsis(a0, a1, a2) expr_ty _Py_Ellipsis(int lineno, int col_offset, PyArena *arena); #define Attribute(a0, a1, a2, a3, a4, a5) _Py_Attribute(a0, a1, a2, a3, a4, a5) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 23:17:26 2011 From: python-checkins at python.org (benjamin.peterson) Date: Tue, 09 Aug 2011 23:17:26 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_add_a_AST_validator_=28clos?= =?utf8?q?es_=2312575=29?= Message-ID: http://hg.python.org/cpython/rev/4090dfdf91a4 changeset: 71795:4090dfdf91a4 user: 
Benjamin Peterson date: Tue Aug 09 16:15:04 2011 -0500 summary: add a AST validator (closes #12575) files: Include/ast.h | 1 + Lib/test/test_ast.py | 410 ++++++++++++++++++++++++++- Misc/NEWS | 2 + Python/ast.c | 486 ++++++++++++++++++++++++++++++- Python/bltinmodule.c | 4 + 5 files changed, 897 insertions(+), 6 deletions(-) diff --git a/Include/ast.h b/Include/ast.h --- a/Include/ast.h +++ b/Include/ast.h @@ -4,6 +4,7 @@ extern "C" { #endif +PyAPI_FUNC(int) PyAST_Validate(mod_ty); PyAPI_FUNC(mod_ty) PyAST_FromNode( const node *n, PyCompilerFlags *flags, diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -1,4 +1,6 @@ -import sys, unittest +import os +import sys +import unittest from test import support import ast @@ -490,8 +492,412 @@ self.assertEqual(ast.literal_eval('1.5 - 2j'), 1.5 - 2j) +class ASTValidatorTests(unittest.TestCase): + + def mod(self, mod, msg=None, mode="exec", *, exc=ValueError): + mod.lineno = mod.col_offset = 0 + ast.fix_missing_locations(mod) + with self.assertRaises(exc) as cm: + compile(mod, "", mode) + if msg is not None: + self.assertIn(msg, str(cm.exception)) + + def expr(self, node, msg=None, *, exc=ValueError): + mod = ast.Module([ast.Expr(node)]) + self.mod(mod, msg, exc=exc) + + def stmt(self, stmt, msg=None): + mod = ast.Module([stmt]) + self.mod(mod, msg) + + def test_module(self): + m = ast.Interactive([ast.Expr(ast.Name("x", ast.Store()))]) + self.mod(m, "must have Load context", "single") + m = ast.Expression(ast.Name("x", ast.Store())) + self.mod(m, "must have Load context", "eval") + + def _check_arguments(self, fac, check): + def arguments(args=None, vararg=None, varargannotation=None, + kwonlyargs=None, kwarg=None, kwargannotation=None, + defaults=None, kw_defaults=None): + if args is None: + args = [] + if kwonlyargs is None: + kwonlyargs = [] + if defaults is None: + defaults = [] + if kw_defaults is None: + kw_defaults = [] + args = ast.arguments(args, vararg, 
varargannotation, kwonlyargs, + kwarg, kwargannotation, defaults, kw_defaults) + return fac(args) + args = [ast.arg("x", ast.Name("x", ast.Store()))] + check(arguments(args=args), "must have Load context") + check(arguments(varargannotation=ast.Num(3)), + "varargannotation but no vararg") + check(arguments(varargannotation=ast.Name("x", ast.Store()), vararg="x"), + "must have Load context") + check(arguments(kwonlyargs=args), "must have Load context") + check(arguments(kwargannotation=ast.Num(42)), + "kwargannotation but no kwarg") + check(arguments(kwargannotation=ast.Name("x", ast.Store()), + kwarg="x"), "must have Load context") + check(arguments(defaults=[ast.Num(3)]), + "more positional defaults than args") + check(arguments(kw_defaults=[ast.Num(4)]), + "length of kwonlyargs is not the same as kw_defaults") + args = [ast.arg("x", ast.Name("x", ast.Load()))] + check(arguments(args=args, defaults=[ast.Name("x", ast.Store())]), + "must have Load context") + args = [ast.arg("a", ast.Name("x", ast.Load())), + ast.arg("b", ast.Name("y", ast.Load()))] + check(arguments(kwonlyargs=args, + kw_defaults=[None, ast.Name("x", ast.Store())]), + "must have Load context") + + def test_funcdef(self): + a = ast.arguments([], None, None, [], None, None, [], []) + f = ast.FunctionDef("x", a, [], [], None) + self.stmt(f, "empty body on FunctionDef") + f = ast.FunctionDef("x", a, [ast.Pass()], [ast.Name("x", ast.Store())], + None) + self.stmt(f, "must have Load context") + f = ast.FunctionDef("x", a, [ast.Pass()], [], + ast.Name("x", ast.Store())) + self.stmt(f, "must have Load context") + def fac(args): + return ast.FunctionDef("x", args, [ast.Pass()], [], None) + self._check_arguments(fac, self.stmt) + + def test_classdef(self): + def cls(bases=None, keywords=None, starargs=None, kwargs=None, + body=None, decorator_list=None): + if bases is None: + bases = [] + if keywords is None: + keywords = [] + if body is None: + body = [ast.Pass()] + if decorator_list is None: + 
decorator_list = [] + return ast.ClassDef("myclass", bases, keywords, starargs, + kwargs, body, decorator_list) + self.stmt(cls(bases=[ast.Name("x", ast.Store())]), + "must have Load context") + self.stmt(cls(keywords=[ast.keyword("x", ast.Name("x", ast.Store()))]), + "must have Load context") + self.stmt(cls(starargs=ast.Name("x", ast.Store())), + "must have Load context") + self.stmt(cls(kwargs=ast.Name("x", ast.Store())), + "must have Load context") + self.stmt(cls(body=[]), "empty body on ClassDef") + self.stmt(cls(body=[None]), "None disallowed") + self.stmt(cls(decorator_list=[ast.Name("x", ast.Store())]), + "must have Load context") + + def test_delete(self): + self.stmt(ast.Delete([]), "empty targets on Delete") + self.stmt(ast.Delete([None]), "None disallowed") + self.stmt(ast.Delete([ast.Name("x", ast.Load())]), + "must have Del context") + + def test_assign(self): + self.stmt(ast.Assign([], ast.Num(3)), "empty targets on Assign") + self.stmt(ast.Assign([None], ast.Num(3)), "None disallowed") + self.stmt(ast.Assign([ast.Name("x", ast.Load())], ast.Num(3)), + "must have Store context") + self.stmt(ast.Assign([ast.Name("x", ast.Store())], + ast.Name("y", ast.Store())), + "must have Load context") + + def test_augassign(self): + aug = ast.AugAssign(ast.Name("x", ast.Load()), ast.Add(), + ast.Name("y", ast.Load())) + self.stmt(aug, "must have Store context") + aug = ast.AugAssign(ast.Name("x", ast.Store()), ast.Add(), + ast.Name("y", ast.Store())) + self.stmt(aug, "must have Load context") + + def test_for(self): + x = ast.Name("x", ast.Store()) + y = ast.Name("y", ast.Load()) + p = ast.Pass() + self.stmt(ast.For(x, y, [], []), "empty body on For") + self.stmt(ast.For(ast.Name("x", ast.Load()), y, [p], []), + "must have Store context") + self.stmt(ast.For(x, ast.Name("y", ast.Store()), [p], []), + "must have Load context") + e = ast.Expr(ast.Name("x", ast.Store())) + self.stmt(ast.For(x, y, [e], []), "must have Load context") + self.stmt(ast.For(x, y, [p], 
[e]), "must have Load context") + + def test_while(self): + self.stmt(ast.While(ast.Num(3), [], []), "empty body on While") + self.stmt(ast.While(ast.Name("x", ast.Store()), [ast.Pass()], []), + "must have Load context") + self.stmt(ast.While(ast.Num(3), [ast.Pass()], + [ast.Expr(ast.Name("x", ast.Store()))]), + "must have Load context") + + def test_if(self): + self.stmt(ast.If(ast.Num(3), [], []), "empty body on If") + i = ast.If(ast.Name("x", ast.Store()), [ast.Pass()], []) + self.stmt(i, "must have Load context") + i = ast.If(ast.Num(3), [ast.Expr(ast.Name("x", ast.Store()))], []) + self.stmt(i, "must have Load context") + i = ast.If(ast.Num(3), [ast.Pass()], + [ast.Expr(ast.Name("x", ast.Store()))]) + self.stmt(i, "must have Load context") + + def test_with(self): + p = ast.Pass() + self.stmt(ast.With([], [p]), "empty items on With") + i = ast.withitem(ast.Num(3), None) + self.stmt(ast.With([i], []), "empty body on With") + i = ast.withitem(ast.Name("x", ast.Store()), None) + self.stmt(ast.With([i], [p]), "must have Load context") + i = ast.withitem(ast.Num(3), ast.Name("x", ast.Load())) + self.stmt(ast.With([i], [p]), "must have Store context") + + def test_raise(self): + r = ast.Raise(None, ast.Num(3)) + self.stmt(r, "Raise with cause but no exception") + r = ast.Raise(ast.Name("x", ast.Store()), None) + self.stmt(r, "must have Load context") + r = ast.Raise(ast.Num(4), ast.Name("x", ast.Store())) + self.stmt(r, "must have Load context") + + def test_try(self): + p = ast.Pass() + t = ast.Try([], [], [], [p]) + self.stmt(t, "empty body on Try") + t = ast.Try([ast.Expr(ast.Name("x", ast.Store()))], [], [], [p]) + self.stmt(t, "must have Load context") + t = ast.Try([p], [], [], []) + self.stmt(t, "Try has neither except handlers nor finalbody") + t = ast.Try([p], [], [p], [p]) + self.stmt(t, "Try has orelse but no except handlers") + t = ast.Try([p], [ast.ExceptHandler(None, "x", [])], [], []) + self.stmt(t, "empty body on ExceptHandler") + e = 
[ast.ExceptHandler(ast.Name("x", ast.Store()), "y", [p])] + self.stmt(ast.Try([p], e, [], []), "must have Load context") + e = [ast.ExceptHandler(None, "x", [p])] + t = ast.Try([p], e, [ast.Expr(ast.Name("x", ast.Store()))], [p]) + self.stmt(t, "must have Load context") + t = ast.Try([p], e, [p], [ast.Expr(ast.Name("x", ast.Store()))]) + self.stmt(t, "must have Load context") + + def test_assert(self): + self.stmt(ast.Assert(ast.Name("x", ast.Store()), None), + "must have Load context") + assrt = ast.Assert(ast.Name("x", ast.Load()), + ast.Name("y", ast.Store())) + self.stmt(assrt, "must have Load context") + + def test_import(self): + self.stmt(ast.Import([]), "empty names on Import") + + def test_importfrom(self): + imp = ast.ImportFrom(None, [ast.alias("x", None)], -42) + self.stmt(imp, "level less than -1") + self.stmt(ast.ImportFrom(None, [], 0), "empty names on ImportFrom") + + def test_global(self): + self.stmt(ast.Global([]), "empty names on Global") + + def test_nonlocal(self): + self.stmt(ast.Nonlocal([]), "empty names on Nonlocal") + + def test_expr(self): + e = ast.Expr(ast.Name("x", ast.Store())) + self.stmt(e, "must have Load context") + + def test_boolop(self): + b = ast.BoolOp(ast.And(), []) + self.expr(b, "less than 2 values") + b = ast.BoolOp(ast.And(), [ast.Num(3)]) + self.expr(b, "less than 2 values") + b = ast.BoolOp(ast.And(), [ast.Num(4), None]) + self.expr(b, "None disallowed") + b = ast.BoolOp(ast.And(), [ast.Num(4), ast.Name("x", ast.Store())]) + self.expr(b, "must have Load context") + + def test_unaryop(self): + u = ast.UnaryOp(ast.Not(), ast.Name("x", ast.Store())) + self.expr(u, "must have Load context") + + def test_lambda(self): + a = ast.arguments([], None, None, [], None, None, [], []) + self.expr(ast.Lambda(a, ast.Name("x", ast.Store())), + "must have Load context") + def fac(args): + return ast.Lambda(args, ast.Name("x", ast.Load())) + self._check_arguments(fac, self.expr) + + def test_ifexp(self): + l = ast.Name("x", ast.Load()) 
+ s = ast.Name("y", ast.Store()) + for args in (s, l, l), (l, s, l), (l, l, s): + self.expr(ast.IfExp(*args), "must have Load context") + + def test_dict(self): + d = ast.Dict([], [ast.Name("x", ast.Load())]) + self.expr(d, "same number of keys as values") + d = ast.Dict([None], [ast.Name("x", ast.Load())]) + self.expr(d, "None disallowed") + d = ast.Dict([ast.Name("x", ast.Load())], [None]) + self.expr(d, "None disallowed") + + def test_set(self): + self.expr(ast.Set([None]), "None disallowed") + s = ast.Set([ast.Name("x", ast.Store())]) + self.expr(s, "must have Load context") + + def _check_comprehension(self, fac): + self.expr(fac([]), "comprehension with no generators") + g = ast.comprehension(ast.Name("x", ast.Load()), + ast.Name("x", ast.Load()), []) + self.expr(fac([g]), "must have Store context") + g = ast.comprehension(ast.Name("x", ast.Store()), + ast.Name("x", ast.Store()), []) + self.expr(fac([g]), "must have Load context") + x = ast.Name("x", ast.Store()) + y = ast.Name("y", ast.Load()) + g = ast.comprehension(x, y, [None]) + self.expr(fac([g]), "None disallowed") + g = ast.comprehension(x, y, [ast.Name("x", ast.Store())]) + self.expr(fac([g]), "must have Load context") + + def _simple_comp(self, fac): + g = ast.comprehension(ast.Name("x", ast.Store()), + ast.Name("x", ast.Load()), []) + self.expr(fac(ast.Name("x", ast.Store()), [g]), + "must have Load context") + def wrap(gens): + return fac(ast.Name("x", ast.Store()), gens) + self._check_comprehension(wrap) + + def test_listcomp(self): + self._simple_comp(ast.ListComp) + + def test_setcomp(self): + self._simple_comp(ast.SetComp) + + def test_generatorexp(self): + self._simple_comp(ast.GeneratorExp) + + def test_dictcomp(self): + g = ast.comprehension(ast.Name("y", ast.Store()), + ast.Name("p", ast.Load()), []) + c = ast.DictComp(ast.Name("x", ast.Store()), + ast.Name("y", ast.Load()), [g]) + self.expr(c, "must have Load context") + c = ast.DictComp(ast.Name("x", ast.Load()), + ast.Name("y", 
ast.Store()), [g]) + self.expr(c, "must have Load context") + def factory(comps): + k = ast.Name("x", ast.Load()) + v = ast.Name("y", ast.Load()) + return ast.DictComp(k, v, comps) + self._check_comprehension(factory) + + def test_yield(self): + self.expr(ast.Yield(ast.Name("x", ast.Store())), "must have Load") + + def test_compare(self): + left = ast.Name("x", ast.Load()) + comp = ast.Compare(left, [ast.In()], []) + self.expr(comp, "no comparators") + comp = ast.Compare(left, [ast.In()], [ast.Num(4), ast.Num(5)]) + self.expr(comp, "different number of comparators and operands") + comp = ast.Compare(ast.Num("blah"), [ast.In()], [left]) + self.expr(comp, "non-numeric", exc=TypeError) + comp = ast.Compare(left, [ast.In()], [ast.Num("blah")]) + self.expr(comp, "non-numeric", exc=TypeError) + + def test_call(self): + func = ast.Name("x", ast.Load()) + args = [ast.Name("y", ast.Load())] + keywords = [ast.keyword("w", ast.Name("z", ast.Load()))] + stararg = ast.Name("p", ast.Load()) + kwarg = ast.Name("q", ast.Load()) + call = ast.Call(ast.Name("x", ast.Store()), args, keywords, stararg, + kwarg) + self.expr(call, "must have Load context") + call = ast.Call(func, [None], keywords, stararg, kwarg) + self.expr(call, "None disallowed") + bad_keywords = [ast.keyword("w", ast.Name("z", ast.Store()))] + call = ast.Call(func, args, bad_keywords, stararg, kwarg) + self.expr(call, "must have Load context") + call = ast.Call(func, args, keywords, ast.Name("z", ast.Store()), kwarg) + self.expr(call, "must have Load context") + call = ast.Call(func, args, keywords, stararg, + ast.Name("w", ast.Store())) + self.expr(call, "must have Load context") + + def test_num(self): + class subint(int): + pass + class subfloat(float): + pass + class subcomplex(complex): + pass + for obj in "0", "hello", subint(), subfloat(), subcomplex(): + self.expr(ast.Num(obj), "non-numeric", exc=TypeError) + + def test_attribute(self): + attr = ast.Attribute(ast.Name("x", ast.Store()), "y", ast.Load()) + 
self.expr(attr, "must have Load context") + + def test_subscript(self): + sub = ast.Subscript(ast.Name("x", ast.Store()), ast.Index(ast.Num(3)), + ast.Load()) + self.expr(sub, "must have Load context") + x = ast.Name("x", ast.Load()) + sub = ast.Subscript(x, ast.Index(ast.Name("y", ast.Store())), + ast.Load()) + self.expr(sub, "must have Load context") + s = ast.Name("x", ast.Store()) + for args in (s, None, None), (None, s, None), (None, None, s): + sl = ast.Slice(*args) + self.expr(ast.Subscript(x, sl, ast.Load()), + "must have Load context") + sl = ast.ExtSlice([]) + self.expr(ast.Subscript(x, sl, ast.Load()), "empty dims on ExtSlice") + sl = ast.ExtSlice([ast.Index(s)]) + self.expr(ast.Subscript(x, sl, ast.Load()), "must have Load context") + + def test_starred(self): + left = ast.List([ast.Starred(ast.Name("x", ast.Load()), ast.Store())], + ast.Store()) + assign = ast.Assign([left], ast.Num(4)) + self.stmt(assign, "must have Store context") + + def _sequence(self, fac): + self.expr(fac([None], ast.Load()), "None disallowed") + self.expr(fac([ast.Name("x", ast.Store())], ast.Load()), + "must have Load context") + + def test_list(self): + self._sequence(ast.List) + + def test_tuple(self): + self._sequence(ast.Tuple) + + def test_stdlib_validates(self): + stdlib = os.path.dirname(ast.__file__) + tests = [fn for fn in os.listdir(stdlib) if fn.endswith(".py")] + tests.extend(["test/test_grammar.py", "test/test_unpack_ex.py"]) + for module in tests: + fn = os.path.join(stdlib, module) + with open(fn, "r", encoding="utf-8") as fp: + source = fp.read() + mod = ast.parse(source) + compile(mod, fn, "exec") + + def test_main(): - support.run_unittest(AST_Tests, ASTHelpers_Test) + support.run_unittest(AST_Tests, ASTHelpers_Test, ASTValidatorTests) def main(): if __name__ != '__main__': diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,8 @@ Core and Builtins ----------------- +- Issue #12575: Validate user-generated AST before it is compiled. 
+ - Make type(None), type(Ellipsis), and type(NotImplemented) callable. They return the respective singleton instances. diff --git a/Python/ast.c b/Python/ast.c --- a/Python/ast.c +++ b/Python/ast.c @@ -1,19 +1,497 @@ /* * This file includes functions to transform a concrete syntax tree (CST) to - * an abstract syntax tree (AST). The main function is PyAST_FromNode(). + * an abstract syntax tree (AST). The main function is PyAST_FromNode(). * */ #include "Python.h" #include "Python-ast.h" -#include "grammar.h" #include "node.h" #include "ast.h" #include "token.h" + +#include + +static int validate_stmts(asdl_seq *); +static int validate_exprs(asdl_seq *, expr_context_ty, int); +static int validate_nonempty_seq(asdl_seq *, const char *, const char *); +static int validate_stmt(stmt_ty); +static int validate_expr(expr_ty, expr_context_ty); + +static int +validate_comprehension(asdl_seq *gens) +{ + int i; + if (!asdl_seq_LEN(gens)) { + PyErr_SetString(PyExc_ValueError, "comprehension with no generators"); + return 0; + } + for (i = 0; i < asdl_seq_LEN(gens); i++) { + comprehension_ty comp = asdl_seq_GET(gens, i); + if (!validate_expr(comp->target, Store) || + !validate_expr(comp->iter, Load) || + !validate_exprs(comp->ifs, Load, 0)) + return 0; + } + return 1; +} + +static int +validate_slice(slice_ty slice) +{ + switch (slice->kind) { + case Slice_kind: + return (!slice->v.Slice.lower || validate_expr(slice->v.Slice.lower, Load)) && + (!slice->v.Slice.upper || validate_expr(slice->v.Slice.upper, Load)) && + (!slice->v.Slice.step || validate_expr(slice->v.Slice.step, Load)); + case ExtSlice_kind: { + int i; + if (!validate_nonempty_seq(slice->v.ExtSlice.dims, "dims", "ExtSlice")) + return 0; + for (i = 0; i < asdl_seq_LEN(slice->v.ExtSlice.dims); i++) + if (!validate_slice(asdl_seq_GET(slice->v.ExtSlice.dims, i))) + return 0; + return 1; + } + case Index_kind: + return validate_expr(slice->v.Index.value, Load); + default: + PyErr_SetString(PyExc_SystemError, "unknown 
slice node"); + return 0; + } +} + +static int +validate_keywords(asdl_seq *keywords) +{ + int i; + for (i = 0; i < asdl_seq_LEN(keywords); i++) + if (!validate_expr(((keyword_ty)asdl_seq_GET(keywords, i))->value, Load)) + return 0; + return 1; +} + +static int +validate_args(asdl_seq *args) +{ + int i; + for (i = 0; i < asdl_seq_LEN(args); i++) { + arg_ty arg = asdl_seq_GET(args, i); + if (arg->annotation && !validate_expr(arg->annotation, Load)) + return 0; + } + return 1; +} + +static const char * +expr_context_name(expr_context_ty ctx) +{ + switch (ctx) { + case Load: + return "Load"; + case Store: + return "Store"; + case Del: + return "Del"; + case AugLoad: + return "AugLoad"; + case AugStore: + return "AugStore"; + case Param: + return "Param"; + default: + assert(0); + return "(unknown)"; + } +} + +static int +validate_arguments(arguments_ty args) +{ + if (!validate_args(args->args)) + return 0; + if (args->varargannotation) { + if (!args->vararg) { + PyErr_SetString(PyExc_ValueError, "varargannotation but no vararg on arguments"); + return 0; + } + if (!validate_expr(args->varargannotation, Load)) + return 0; + } + if (!validate_args(args->kwonlyargs)) + return 0; + if (args->kwargannotation) { + if (!args->kwarg) { + PyErr_SetString(PyExc_ValueError, "kwargannotation but no kwarg on arguments"); + return 0; + } + if (!validate_expr(args->kwargannotation, Load)) + return 0; + } + if (asdl_seq_LEN(args->defaults) > asdl_seq_LEN(args->args)) { + PyErr_SetString(PyExc_ValueError, "more positional defaults than args on arguments"); + return 0; + } + if (asdl_seq_LEN(args->kw_defaults) != asdl_seq_LEN(args->kwonlyargs)) { + PyErr_SetString(PyExc_ValueError, "length of kwonlyargs is not the same as " + "kw_defaults on arguments"); + return 0; + } + return validate_exprs(args->defaults, Load, 0) && validate_exprs(args->kw_defaults, Load, 1); +} + +static int +validate_expr(expr_ty exp, expr_context_ty ctx) +{ + int check_ctx = 1; + expr_context_ty actual_ctx; + + 
/* First check expression context. */ + switch (exp->kind) { + case Attribute_kind: + actual_ctx = exp->v.Attribute.ctx; + break; + case Subscript_kind: + actual_ctx = exp->v.Subscript.ctx; + break; + case Starred_kind: + actual_ctx = exp->v.Starred.ctx; + break; + case Name_kind: + actual_ctx = exp->v.Name.ctx; + break; + case List_kind: + actual_ctx = exp->v.List.ctx; + break; + case Tuple_kind: + actual_ctx = exp->v.Tuple.ctx; + break; + default: + if (ctx != Load) { + PyErr_Format(PyExc_ValueError, "expression which can't be " + "assigned to in %s context", expr_context_name(ctx)); + return 0; + } + check_ctx = 0; + } + if (check_ctx && actual_ctx != ctx) { + PyErr_Format(PyExc_ValueError, "expression must have %s context but has %s instead", + expr_context_name(ctx), expr_context_name(actual_ctx)); + return 0; + } + + /* Now validate expression. */ + switch (exp->kind) { + case BoolOp_kind: + if (asdl_seq_LEN(exp->v.BoolOp.values) < 2) { + PyErr_SetString(PyExc_ValueError, "BoolOp with less than 2 values"); + return 0; + } + return validate_exprs(exp->v.BoolOp.values, Load, 0); + case BinOp_kind: + return validate_expr(exp->v.BinOp.left, Load) && + validate_expr(exp->v.BinOp.right, Load); + case UnaryOp_kind: + return validate_expr(exp->v.UnaryOp.operand, Load); + case Lambda_kind: + return validate_arguments(exp->v.Lambda.args) && + validate_expr(exp->v.Lambda.body, Load); + case IfExp_kind: + return validate_expr(exp->v.IfExp.test, Load) && + validate_expr(exp->v.IfExp.body, Load) && + validate_expr(exp->v.IfExp.orelse, Load); + case Dict_kind: + if (asdl_seq_LEN(exp->v.Dict.keys) != asdl_seq_LEN(exp->v.Dict.values)) { + PyErr_SetString(PyExc_ValueError, + "Dict doesn't have the same number of keys as values"); + return 0; + } + return validate_exprs(exp->v.Dict.keys, Load, 0) && + validate_exprs(exp->v.Dict.values, Load, 0); + case Set_kind: + return validate_exprs(exp->v.Set.elts, Load, 0); +#define COMP(NAME) \ + case NAME ## _kind: \ + return 
validate_comprehension(exp->v.NAME.generators) && \ + validate_expr(exp->v.NAME.elt, Load); + COMP(ListComp) + COMP(SetComp) + COMP(GeneratorExp) +#undef COMP + case DictComp_kind: + return validate_comprehension(exp->v.DictComp.generators) && + validate_expr(exp->v.DictComp.key, Load) && + validate_expr(exp->v.DictComp.value, Load); + case Yield_kind: + return !exp->v.Yield.value || validate_expr(exp->v.Yield.value, Load); + case Compare_kind: + if (!asdl_seq_LEN(exp->v.Compare.comparators)) { + PyErr_SetString(PyExc_ValueError, "Compare with no comparators"); + return 0; + } + if (asdl_seq_LEN(exp->v.Compare.comparators) != + asdl_seq_LEN(exp->v.Compare.ops)) { + PyErr_SetString(PyExc_ValueError, "Compare has a different number " + "of comparators and operands"); + return 0; + } + return validate_exprs(exp->v.Compare.comparators, Load, 0) && + validate_expr(exp->v.Compare.left, Load); + case Call_kind: + return validate_expr(exp->v.Call.func, Load) && + validate_exprs(exp->v.Call.args, Load, 0) && + validate_keywords(exp->v.Call.keywords) && + (!exp->v.Call.starargs || validate_expr(exp->v.Call.starargs, Load)) && + (!exp->v.Call.kwargs || validate_expr(exp->v.Call.kwargs, Load)); + case Num_kind: { + PyObject *n = exp->v.Num.n; + if (!PyLong_CheckExact(n) && !PyFloat_CheckExact(n) && + !PyComplex_CheckExact(n)) { + PyErr_SetString(PyExc_TypeError, "non-numeric type in Num"); + return 0; + } + return 1; + } + case Str_kind: { + PyObject *s = exp->v.Str.s; + if (!PyUnicode_CheckExact(s)) { + PyErr_SetString(PyExc_TypeError, "non-string type in Str"); + return 0; + } + return 1; + } + case Bytes_kind: { + PyObject *b = exp->v.Bytes.s; + if (!PyBytes_CheckExact(b)) { + PyErr_SetString(PyExc_TypeError, "non-bytes type in Bytes"); + return 0; + } + return 1; + } + case Attribute_kind: + return validate_expr(exp->v.Attribute.value, Load); + case Subscript_kind: + return validate_slice(exp->v.Subscript.slice) && + validate_expr(exp->v.Subscript.value, Load); + case 
Starred_kind: + return validate_expr(exp->v.Starred.value, ctx); + case List_kind: + return validate_exprs(exp->v.List.elts, ctx, 0); + case Tuple_kind: + return validate_exprs(exp->v.Tuple.elts, ctx, 0); + /* These last cases don't have any checking. */ + case Name_kind: + case Ellipsis_kind: + return 1; + default: + PyErr_SetString(PyExc_SystemError, "unexpected expression"); + return 0; + } +} + +static int +validate_nonempty_seq(asdl_seq *seq, const char *what, const char *owner) +{ + if (asdl_seq_LEN(seq)) + return 1; + PyErr_Format(PyExc_ValueError, "empty %s on %s", what, owner); + return 0; +} + +static int +validate_assignlist(asdl_seq *targets, expr_context_ty ctx) +{ + return validate_nonempty_seq(targets, "targets", ctx == Del ? "Delete" : "Assign") && + validate_exprs(targets, ctx, 0); +} + +static int +validate_body(asdl_seq *body, const char *owner) +{ + return validate_nonempty_seq(body, "body", owner) && validate_stmts(body); +} + +static int +validate_stmt(stmt_ty stmt) +{ + int i; + switch (stmt->kind) { + case FunctionDef_kind: + return validate_body(stmt->v.FunctionDef.body, "FunctionDef") && + validate_arguments(stmt->v.FunctionDef.args) && + validate_exprs(stmt->v.FunctionDef.decorator_list, Load, 0) && + (!stmt->v.FunctionDef.returns || + validate_expr(stmt->v.FunctionDef.returns, Load)); + case ClassDef_kind: + return validate_body(stmt->v.ClassDef.body, "ClassDef") && + validate_exprs(stmt->v.ClassDef.bases, Load, 0) && + validate_keywords(stmt->v.ClassDef.keywords) && + validate_exprs(stmt->v.ClassDef.decorator_list, Load, 0) && + (!stmt->v.ClassDef.starargs || validate_expr(stmt->v.ClassDef.starargs, Load)) && + (!stmt->v.ClassDef.kwargs || validate_expr(stmt->v.ClassDef.kwargs, Load)); + case Return_kind: + return !stmt->v.Return.value || validate_expr(stmt->v.Return.value, Load); + case Delete_kind: + return validate_assignlist(stmt->v.Delete.targets, Del); + case Assign_kind: + return validate_assignlist(stmt->v.Assign.targets, Store) 
&& + validate_expr(stmt->v.Assign.value, Load); + case AugAssign_kind: + return validate_expr(stmt->v.AugAssign.target, Store) && + validate_expr(stmt->v.AugAssign.value, Load); + case For_kind: + return validate_expr(stmt->v.For.target, Store) && + validate_expr(stmt->v.For.iter, Load) && + validate_body(stmt->v.For.body, "For") && + validate_stmts(stmt->v.For.orelse); + case While_kind: + return validate_expr(stmt->v.While.test, Load) && + validate_body(stmt->v.While.body, "While") && + validate_stmts(stmt->v.While.orelse); + case If_kind: + return validate_expr(stmt->v.If.test, Load) && + validate_body(stmt->v.If.body, "If") && + validate_stmts(stmt->v.If.orelse); + case With_kind: + if (!validate_nonempty_seq(stmt->v.With.items, "items", "With")) + return 0; + for (i = 0; i < asdl_seq_LEN(stmt->v.With.items); i++) { + withitem_ty item = asdl_seq_GET(stmt->v.With.items, i); + if (!validate_expr(item->context_expr, Load) || + (item->optional_vars && !validate_expr(item->optional_vars, Store))) + return 0; + } + return validate_body(stmt->v.With.body, "With"); + case Raise_kind: + if (stmt->v.Raise.exc) { + return validate_expr(stmt->v.Raise.exc, Load) && + (!stmt->v.Raise.cause || validate_expr(stmt->v.Raise.cause, Load)); + } + if (stmt->v.Raise.cause) { + PyErr_SetString(PyExc_ValueError, "Raise with cause but no exception"); + return 0; + } + return 1; + case Try_kind: + if (!validate_body(stmt->v.Try.body, "Try")) + return 0; + if (!asdl_seq_LEN(stmt->v.Try.handlers) && + !asdl_seq_LEN(stmt->v.Try.finalbody)) { + PyErr_SetString(PyExc_ValueError, "Try has neither except handlers nor finalbody"); + return 0; + } + if (!asdl_seq_LEN(stmt->v.Try.handlers) && + asdl_seq_LEN(stmt->v.Try.orelse)) { + PyErr_SetString(PyExc_ValueError, "Try has orelse but no except handlers"); + return 0; + } + for (i = 0; i < asdl_seq_LEN(stmt->v.Try.handlers); i++) { + excepthandler_ty handler = asdl_seq_GET(stmt->v.Try.handlers, i); + if ((handler->v.ExceptHandler.type && + 
!validate_expr(handler->v.ExceptHandler.type, Load)) || + !validate_body(handler->v.ExceptHandler.body, "ExceptHandler")) + return 0; + } + return (!asdl_seq_LEN(stmt->v.Try.finalbody) || + validate_stmts(stmt->v.Try.finalbody)) && + (!asdl_seq_LEN(stmt->v.Try.orelse) || + validate_stmts(stmt->v.Try.orelse)); + case Assert_kind: + return validate_expr(stmt->v.Assert.test, Load) && + (!stmt->v.Assert.msg || validate_expr(stmt->v.Assert.msg, Load)); + case Import_kind: + return validate_nonempty_seq(stmt->v.Import.names, "names", "Import"); + case ImportFrom_kind: + if (stmt->v.ImportFrom.level < -1) { + PyErr_SetString(PyExc_ValueError, "ImportFrom level less than -1"); + return 0; + } + return validate_nonempty_seq(stmt->v.ImportFrom.names, "names", "ImportFrom"); + case Global_kind: + return validate_nonempty_seq(stmt->v.Global.names, "names", "Global"); + case Nonlocal_kind: + return validate_nonempty_seq(stmt->v.Nonlocal.names, "names", "Nonlocal"); + case Expr_kind: + return validate_expr(stmt->v.Expr.value, Load); + case Pass_kind: + case Break_kind: + case Continue_kind: + return 1; + default: + PyErr_SetString(PyExc_SystemError, "unexpected statement"); + return 0; + } +} + +static int +validate_stmts(asdl_seq *seq) +{ + int i; + for (i = 0; i < asdl_seq_LEN(seq); i++) { + stmt_ty stmt = asdl_seq_GET(seq, i); + if (stmt) { + if (!validate_stmt(stmt)) + return 0; + } + else { + PyErr_SetString(PyExc_ValueError, + "None disallowed in statement list"); + return 0; + } + } + return 1; +} + +static int +validate_exprs(asdl_seq *exprs, expr_context_ty ctx, int null_ok) +{ + int i; + for (i = 0; i < asdl_seq_LEN(exprs); i++) { + expr_ty expr = asdl_seq_GET(exprs, i); + if (expr) { + if (!validate_expr(expr, ctx)) + return 0; + } + else if (!null_ok) { + PyErr_SetString(PyExc_ValueError, + "None disallowed in expression list"); + return 0; + } + + } + return 1; +} + +int +PyAST_Validate(mod_ty mod) +{ + int res = 0; + + switch (mod->kind) { + case Module_kind: + res 
= validate_stmts(mod->v.Module.body); + break; + case Interactive_kind: + res = validate_stmts(mod->v.Interactive.body); + break; + case Expression_kind: + res = validate_expr(mod->v.Expression.body, Load); + break; + case Suite_kind: + PyErr_SetString(PyExc_ValueError, "Suite is not valid in the CPython compiler"); + break; + default: + PyErr_SetString(PyExc_SystemError, "impossible module node"); + res = 0; + break; + } + return res; +} + +/* This is down here, so defines like "test" don't intefere with access AST above. */ +#include "grammar.h" #include "parsetok.h" #include "graminit.h" -#include - /* Data structure used internally */ struct compiling { char *c_encoding; /* source encoding */ diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -604,6 +604,10 @@ PyArena_Free(arena); goto error; } + if (!PyAST_Validate(mod)) { + PyArena_Free(arena); + goto error; + } result = (PyObject*)PyAST_CompileEx(mod, filename, &cf, optimize, arena); PyArena_Free(arena); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 23:17:27 2011 From: python-checkins at python.org (benjamin.peterson) Date: Tue, 09 Aug 2011 23:17:27 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?b?OiBtZXJnZSAzLjIgKCMxMjcxOSk=?= Message-ID: http://hg.python.org/cpython/rev/69df5a8d164e changeset: 71796:69df5a8d164e parent: 71795:4090dfdf91a4 parent: 71791:6ef65516fd7a user: Benjamin Peterson date: Tue Aug 09 16:15:29 2011 -0500 summary: merge 3.2 (#12719) files: Doc/c-api/typeobj.rst | 5 +++++ 1 files changed, 5 insertions(+), 0 deletions(-) diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -745,6 +745,11 @@ This field is not inherited by subtypes (though the attributes defined in here are inherited through a different mechanism). + .. 
warning:: + + It is not safe to use :c:func:`PyDict_SetItem` on or otherwise modify + :attr:`tp_dict` with the dictionary C-API. + .. c:member:: descrgetfunc PyTypeObject.tp_descr_get -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 23:17:28 2011 From: python-checkins at python.org (benjamin.peterson) Date: Tue, 09 Aug 2011 23:17:28 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_merge_heads?= Message-ID: http://hg.python.org/cpython/rev/bea1e021902e changeset: 71797:bea1e021902e parent: 71796:69df5a8d164e parent: 71790:a5e48ce2f5a3 user: Benjamin Peterson date: Tue Aug 09 16:16:33 2011 -0500 summary: merge heads files: Doc/library/collections.rst | 14 +++++++++++++- Lib/collections/__init__.py | 11 +++++++++++ Lib/test/test_collections.py | 5 +++++ Misc/NEWS | 2 ++ 4 files changed, 31 insertions(+), 1 deletions(-) diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst --- a/Doc/library/collections.rst +++ b/Doc/library/collections.rst @@ -264,7 +264,7 @@ c.items() # convert to a list of (elem, cnt) pairs Counter(dict(list_of_pairs)) # convert from a list of (elem, cnt) pairs c.most_common()[:-n:-1] # n least common elements - c += Counter() # remove zero and negative counts + +c # remove zero and negative counts Several mathematical operations are provided for combining :class:`Counter` objects to produce multisets (counters that have counts greater than zero). @@ -284,6 +284,18 @@ >>> c | d # union: max(c[x], d[x]) Counter({'a': 3, 'b': 2}) +Unary addition and substraction are shortcuts for adding an empty counter +or subtracting from an empty counter. + + >>> c = Counter(a=2, b=-4) + >>> +c + Counter({'a': 2}) + >>> -c + Counter({'b': 4}) + +.. versionadded:: 3.3 + Added support for unary plus and unary minus. + .. 
note:: Counters were primarily designed to work with positive integers to represent diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py --- a/Lib/collections/__init__.py +++ b/Lib/collections/__init__.py @@ -672,6 +672,17 @@ result[elem] = newcount return result + def __pos__(self): + 'Adds an empty counter, effectively stripping negative and zero counts' + return self + Counter() + + def __neg__(self): + '''Subtracts from an empty counter. Strips positive and zero counts, + and flips the sign on negative counts. + + ''' + return Counter() - self + ######################################################################## ### ChainMap (helper for configparser and string.Template) diff --git a/Lib/test/test_collections.py b/Lib/test/test_collections.py --- a/Lib/test/test_collections.py +++ b/Lib/test/test_collections.py @@ -943,6 +943,11 @@ c.subtract('aaaabbcce') self.assertEqual(c, Counter(a=-1, b=0, c=-1, d=1, e=-1)) + def test_unary(self): + c = Counter(a=-5, b=0, c=5, d=10, e=15,g=40) + self.assertEqual(dict(+c), dict(c=5, d=10, e=15, g=40)) + self.assertEqual(dict(-c), dict(a=5)) + def test_helper_function(self): # two paths, one for real dicts and one for other mappings elems = list('abracadabra') diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -254,6 +254,8 @@ - Issue #12540: Prevent zombie IDLE processes on Windows due to changes in os.kill(). +- Add support for unary plus and unary minus to collections.Counter(). + - Issue #12683: urlparse updated to include svn as schemes that uses relative paths. (svn from 1.5 onwards support relative path). 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 23:17:29 2011 From: python-checkins at python.org (benjamin.peterson) Date: Tue, 09 Aug 2011 23:17:29 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_fix_indentation?= Message-ID: http://hg.python.org/cpython/rev/d3d6da4a1e27 changeset: 71798:d3d6da4a1e27 user: Benjamin Peterson date: Tue Aug 09 16:17:12 2011 -0500 summary: fix indentation files: Lib/test/test_ast.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -738,7 +738,7 @@ l = ast.Name("x", ast.Load()) s = ast.Name("y", ast.Store()) for args in (s, l, l), (l, s, l), (l, l, s): - self.expr(ast.IfExp(*args), "must have Load context") + self.expr(ast.IfExp(*args), "must have Load context") def test_dict(self): d = ast.Dict([], [ast.Name("x", ast.Load())]) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 23:29:07 2011 From: python-checkins at python.org (benjamin.peterson) Date: Tue, 09 Aug 2011 23:29:07 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_don=27t_rewrite_the_header_?= =?utf8?q?file_if_it_hasn=27t_changed=3B_this_reduces_development?= Message-ID: http://hg.python.org/cpython/rev/d80ce5f549c1 changeset: 71799:d80ce5f549c1 user: Benjamin Peterson date: Tue Aug 09 16:28:58 2011 -0500 summary: don't rewrite the header file if it hasn't changed; this reduces development build time files: Parser/asdl_c.py | 13 ++++++++++--- 1 files changed, 10 insertions(+), 3 deletions(-) diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -4,7 +4,9 @@ # TO DO # handle fields that have a type but no name -import os, sys +import os +import sys +import StringIO import subprocess import asdl @@ -1155,7 +1157,7 @@ sys.exit(1) if INC_DIR: p = "%s/%s-ast.h" % (INC_DIR, mod.name) - f = open(p, "w") + f = 
StringIO.StringIO() f.write(auto_gen_msg) f.write('#include "asdl.h"\n\n') c = ChainOfVisitors(TypeDefVisitor(f), @@ -1166,7 +1168,12 @@ f.write("PyObject* PyAST_mod2obj(mod_ty t);\n") f.write("mod_ty PyAST_obj2mod(PyObject* ast, PyArena* arena, int mode);\n") f.write("int PyAST_Check(PyObject* obj);\n") - f.close() + s = f.getvalue() + with open(p, "r") as fp: + write = fp.read() != s + if write: + with open(p, "w") as fp: + f.write(s) if SRC_DIR: p = os.path.join(SRC_DIR, str(mod.name) + "-ast.c") -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 23:49:24 2011 From: python-checkins at python.org (benjamin.peterson) Date: Tue, 09 Aug 2011 23:49:24 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_typo?= Message-ID: http://hg.python.org/cpython/rev/92c76d421c72 changeset: 71800:92c76d421c72 user: Benjamin Peterson date: Tue Aug 09 16:49:13 2011 -0500 summary: typo files: Modules/posixmodule.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -5072,7 +5072,7 @@ pid_t pid; Py_cpu_set *cpu_set; - if (!PyArg_ParseTuple(args, _Py_PARSE_PID "O!:schbed_setaffinity", + if (!PyArg_ParseTuple(args, _Py_PARSE_PID "O!:sched_setaffinity", &pid, &cpu_set_type, &cpu_set)) return NULL; if (sched_setaffinity(pid, cpu_set->size, cpu_set->set)) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 9 23:50:02 2011 From: python-checkins at python.org (benjamin.peterson) Date: Tue, 09 Aug 2011 23:50:02 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_make_this_work_with_py2=2E5?= Message-ID: http://hg.python.org/cpython/rev/334cc98de85b changeset: 71801:334cc98de85b user: Benjamin Peterson date: Tue Aug 09 16:49:52 2011 -0500 summary: make this work with py2.5 files: Parser/asdl_c.py | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/Parser/asdl_c.py 
b/Parser/asdl_c.py --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -1,5 +1,6 @@ #! /usr/bin/env python """Generate C code from an ASDL description.""" +from __future__ import with_statement # TO DO # handle fields that have a type but no name -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 10 01:39:10 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 10 Aug 2011 01:39:10 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_make_this_work_on_2=2E4?= Message-ID: http://hg.python.org/cpython/rev/0312dd629797 changeset: 71802:0312dd629797 user: Benjamin Peterson date: Tue Aug 09 18:38:57 2011 -0500 summary: make this work on 2.4 files: Parser/asdl_c.py | 22 +++++++++++++++++----- 1 files changed, 17 insertions(+), 5 deletions(-) diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -1,10 +1,10 @@ #! /usr/bin/env python """Generate C code from an ASDL description.""" -from __future__ import with_statement # TO DO # handle fields that have a type but no name +import errno import os import sys import StringIO @@ -1170,11 +1170,23 @@ f.write("mod_ty PyAST_obj2mod(PyObject* ast, PyArena* arena, int mode);\n") f.write("int PyAST_Check(PyObject* obj);\n") s = f.getvalue() - with open(p, "r") as fp: - write = fp.read() != s + write = True + try: + fp = open(p, "r") + except IOError as e: + if e.errno != errno.ENOENT: + raise + else: + try: + write = fp.read() != s + finally: + fp.close() if write: - with open(p, "w") as fp: - f.write(s) + fp = open(p, "w") + try: + fp.write(s) + finally: + fp.close() if SRC_DIR: p = os.path.join(SRC_DIR, str(mod.name) + "-ast.c") -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 10 01:48:11 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 10 Aug 2011 01:48:11 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_2=2Ex_syntax?= Message-ID: 
http://hg.python.org/cpython/rev/1b4fae183da3 changeset: 71803:1b4fae183da3 user: Benjamin Peterson date: Tue Aug 09 18:48:02 2011 -0500 summary: 2.x syntax files: Parser/asdl_c.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -1173,7 +1173,7 @@ write = True try: fp = open(p, "r") - except IOError as e: + except IOError, e: if e.errno != errno.ENOENT: raise else: -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Wed Aug 10 05:23:06 2011 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Wed, 10 Aug 2011 05:23:06 +0200 Subject: [Python-checkins] Daily reference leaks (1b4fae183da3): sum=0 Message-ID: results for 1b4fae183da3 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogf68euS', '-x'] From python-checkins at python.org Wed Aug 10 15:55:21 2011 From: python-checkins at python.org (senthil.kumaran) Date: Wed, 10 Aug 2011 15:55:21 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Fix_closes_Issu?= =?utf8?q?e12722__-_link_heapq_source_in_the_text_format_in_the?= Message-ID: http://hg.python.org/cpython/rev/9f482b957d77 changeset: 71804:9f482b957d77 branch: 2.7 parent: 71789:7d5a37ce42d5 user: Senthil Kumaran date: Wed Aug 10 21:54:56 2011 +0800 summary: Fix closes Issue12722 - link heapq source in the text format in the documentation. files: Doc/library/heapq.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/heapq.rst b/Doc/library/heapq.rst --- a/Doc/library/heapq.rst +++ b/Doc/library/heapq.rst @@ -16,7 +16,7 @@ .. seealso:: Latest version of the `heapq Python source code - `_ + `_ Heaps are binary trees for which every parent node has a value less than or equal to any of its children. 
This implementation uses arrays for which -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 10 15:58:55 2011 From: python-checkins at python.org (senthil.kumaran) Date: Wed, 10 Aug 2011 15:58:55 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Remove_the_un-e?= =?utf8?q?xercised_in-module_test_code=2E?= Message-ID: http://hg.python.org/cpython/rev/d5b274a0b0a5 changeset: 71805:d5b274a0b0a5 branch: 2.7 user: Senthil Kumaran date: Wed Aug 10 21:57:22 2011 +0800 summary: Remove the un-exercised in-module test code. files: Lib/httplib.py | 68 -------------------------------------- 1 files changed, 0 insertions(+), 68 deletions(-) diff --git a/Lib/httplib.py b/Lib/httplib.py --- a/Lib/httplib.py +++ b/Lib/httplib.py @@ -1322,71 +1322,3 @@ return L + self._file.readlines() else: return L + self._file.readlines(size) - -def test(): - """Test this module. - - A hodge podge of tests collected here, because they have too many - external dependencies for the regular test suite. 
- """ - - import sys - import getopt - opts, args = getopt.getopt(sys.argv[1:], 'd') - dl = 0 - for o, a in opts: - if o == '-d': dl = dl + 1 - host = 'www.python.org' - selector = '/' - if args[0:]: host = args[0] - if args[1:]: selector = args[1] - h = HTTP() - h.set_debuglevel(dl) - h.connect(host) - h.putrequest('GET', selector) - h.endheaders() - status, reason, headers = h.getreply() - print 'status =', status - print 'reason =', reason - print "read", len(h.getfile().read()) - print - if headers: - for header in headers.headers: print header.strip() - print - - # minimal test that code to extract host from url works - class HTTP11(HTTP): - _http_vsn = 11 - _http_vsn_str = 'HTTP/1.1' - - h = HTTP11('www.python.org') - h.putrequest('GET', 'http://www.python.org/~jeremy/') - h.endheaders() - h.getreply() - h.close() - - try: - import ssl - except ImportError: - pass - else: - - for host, selector in (('sourceforge.net', '/projects/python'), - ): - print "https://%s%s" % (host, selector) - hs = HTTPS() - hs.set_debuglevel(dl) - hs.connect(host) - hs.putrequest('GET', selector) - hs.endheaders() - status, reason, headers = hs.getreply() - print 'status =', status - print 'reason =', reason - print "read", len(hs.getfile().read()) - print - if headers: - for header in headers.headers: print header.strip() - print - -if __name__ == '__main__': - test() -- Repository URL: http://hg.python.org/cpython From tjreedy at udel.edu Wed Aug 10 21:55:55 2011 From: tjreedy at udel.edu (Terry Reedy) Date: Wed, 10 Aug 2011 15:55:55 -0400 Subject: [Python-checkins] cpython (2.7): Fix closes Issue12722 - link heapq source in the text format in the In-Reply-To: References: Message-ID: <4E42E24B.8020601@udel.edu> > > Latest version of the `heapq Python source code > -`_ > +`_ Should links be to the hg repository instead of svn? Is svn updated from hg? I thought is was (mostly) historical read-only. 
From python-checkins at python.org Thu Aug 11 03:25:31 2011 From: python-checkins at python.org (senthil.kumaran) Date: Thu, 11 Aug 2011 03:25:31 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Fix_closes_Issu?= =?utf8?q?e10087_-__fixing_the_output_of_calendar_display_in_the_html?= Message-ID: http://hg.python.org/cpython/rev/9fc7ef60ea06 changeset: 71806:9fc7ef60ea06 branch: 3.2 parent: 71791:6ef65516fd7a user: Senthil Kumaran date: Thu Aug 11 09:22:52 2011 +0800 summary: Fix closes Issue10087 - fixing the output of calendar display in the html format. Patch by Chris Lambacher. Test Contributed by catherine. files: Lib/calendar.py | 11 +++++++---- Lib/test/test_calendar.py | 7 +++++++ 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/Lib/calendar.py b/Lib/calendar.py --- a/Lib/calendar.py +++ b/Lib/calendar.py @@ -636,7 +636,7 @@ parser.add_option( "-e", "--encoding", dest="encoding", default=None, - help="Encoding to use for output" + help="Encoding to use for output." 
) parser.add_option( "-t", "--type", @@ -662,10 +662,11 @@ if encoding is None: encoding = sys.getdefaultencoding() optdict = dict(encoding=encoding, css=options.css) + write = sys.stdout.buffer.write if len(args) == 1: - print(cal.formatyearpage(datetime.date.today().year, **optdict)) + write(cal.formatyearpage(datetime.date.today().year, **optdict)) elif len(args) == 2: - print(cal.formatyearpage(int(args[1]), **optdict)) + write(cal.formatyearpage(int(args[1]), **optdict)) else: parser.error("incorrect number of arguments") sys.exit(1) @@ -687,9 +688,11 @@ else: parser.error("incorrect number of arguments") sys.exit(1) + write = sys.stdout.write if options.encoding: result = result.encode(options.encoding) - print(result) + write = sys.stdout.buffer.write + write(result) if __name__ == "__main__": diff --git a/Lib/test/test_calendar.py b/Lib/test/test_calendar.py --- a/Lib/test/test_calendar.py +++ b/Lib/test/test_calendar.py @@ -2,6 +2,7 @@ import unittest from test import support +from test.script_helper import assert_python_ok import time import locale @@ -451,6 +452,11 @@ self.assertEqual(calendar.leapdays(1997,2020), 5) +class ConsoleOutputTestCase(unittest.TestCase): + def test_outputs_bytes(self): + (return_code, stdout, stderr) = assert_python_ok('-m', 'calendar', '--type=html', '2010') + self.assertEqual(stdout[:6], b' http://hg.python.org/cpython/rev/23316468ed4f changeset: 71807:23316468ed4f branch: 3.2 user: Senthil Kumaran date: Thu Aug 11 09:24:37 2011 +0800 summary: News item for Issue10087. files: Misc/NEWS | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -41,6 +41,8 @@ Library ------- +- Issue #10087: Fix the html output format of the calendar module. + - Issue #12540: Prevent zombie IDLE processes on Windows due to changes in os.kill(). 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 11 03:25:58 2011 From: python-checkins at python.org (senthil.kumaran) Date: Thu, 11 Aug 2011 03:25:58 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_merge_from_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/fdfd1d67d9fb changeset: 71808:fdfd1d67d9fb parent: 71803:1b4fae183da3 parent: 71807:23316468ed4f user: Senthil Kumaran date: Thu Aug 11 09:25:45 2011 +0800 summary: merge from 3.2 files: Lib/calendar.py | 11 +++++++---- Lib/test/test_calendar.py | 7 +++++++ Misc/NEWS | 2 ++ 3 files changed, 16 insertions(+), 4 deletions(-) diff --git a/Lib/calendar.py b/Lib/calendar.py --- a/Lib/calendar.py +++ b/Lib/calendar.py @@ -636,7 +636,7 @@ parser.add_option( "-e", "--encoding", dest="encoding", default=None, - help="Encoding to use for output" + help="Encoding to use for output." ) parser.add_option( "-t", "--type", @@ -662,10 +662,11 @@ if encoding is None: encoding = sys.getdefaultencoding() optdict = dict(encoding=encoding, css=options.css) + write = sys.stdout.buffer.write if len(args) == 1: - print(cal.formatyearpage(datetime.date.today().year, **optdict)) + write(cal.formatyearpage(datetime.date.today().year, **optdict)) elif len(args) == 2: - print(cal.formatyearpage(int(args[1]), **optdict)) + write(cal.formatyearpage(int(args[1]), **optdict)) else: parser.error("incorrect number of arguments") sys.exit(1) @@ -687,9 +688,11 @@ else: parser.error("incorrect number of arguments") sys.exit(1) + write = sys.stdout.write if options.encoding: result = result.encode(options.encoding) - print(result) + write = sys.stdout.buffer.write + write(result) if __name__ == "__main__": diff --git a/Lib/test/test_calendar.py b/Lib/test/test_calendar.py --- a/Lib/test/test_calendar.py +++ b/Lib/test/test_calendar.py @@ -2,6 +2,7 @@ import unittest from test import support +from test.script_helper import assert_python_ok import time 
import locale @@ -451,6 +452,11 @@ self.assertEqual(calendar.leapdays(1997,2020), 5) +class ConsoleOutputTestCase(unittest.TestCase): + def test_outputs_bytes(self): + (return_code, stdout, stderr) = assert_python_ok('-m', 'calendar', '--type=html', '2010') + self.assertEqual(stdout[:6], b' http://hg.python.org/cpython/rev/77a65b078852 changeset: 71809:77a65b078852 parent: 71803:1b4fae183da3 user: Brian Curtin date: Wed Aug 10 20:05:21 2011 -0500 summary: Add Py_RETURN_NOTIMPLEMENTED macro. Fixes #12724. files: Include/object.h | 4 ++++ 1 files changed, 4 insertions(+), 0 deletions(-) diff --git a/Include/object.h b/Include/object.h --- a/Include/object.h +++ b/Include/object.h @@ -792,6 +792,10 @@ PyAPI_DATA(PyObject) _Py_NotImplementedStruct; /* Don't use this directly */ #define Py_NotImplemented (&_Py_NotImplementedStruct) +/* Macro for returning Py_NotImplemented from a function */ +#define Py_RETURN_NOTIMPLEMENTED \ + return Py_INCREF(Py_NotImplemented), Py_NotImplemented + /* Rich comparison opcodes */ #define Py_LT 0 #define Py_LE 1 -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 11 03:34:38 2011 From: python-checkins at python.org (brian.curtin) Date: Thu, 11 Aug 2011 03:34:38 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Replace_Py=5FNotImplemented?= =?utf8?q?_returns_with_the_macro_form_Py=5FRETURN=5FNOTIMPLEMENTED=2E?= Message-ID: http://hg.python.org/cpython/rev/d0b0fcbb40db changeset: 71810:d0b0fcbb40db user: Brian Curtin date: Wed Aug 10 20:28:54 2011 -0500 summary: Replace Py_NotImplemented returns with the macro form Py_RETURN_NOTIMPLEMENTED. The macro was introduced in #12724. 
files: Modules/_collectionsmodule.c | 3 +- Modules/_datetimemodule.c | 59 ++++++++--------------- Modules/_sqlite/row.c | 10 +-- Modules/arraymodule.c | 6 +- Modules/posixmodule.c | 10 +-- Modules/xxlimited.c | 3 +- Objects/abstract.c | 3 +- Objects/bytearrayobject.c | 9 +-- Objects/classobject.c | 6 +- Objects/codeobject.c | 3 +- Objects/complexobject.c | 3 +- Objects/dictobject.c | 6 +- Objects/floatobject.c | 3 +- Objects/listobject.c | 6 +- Objects/longobject.c | 9 +-- Objects/memoryobject.c | 3 +- Objects/methodobject.c | 3 +- Objects/object.c | 3 +- Objects/setobject.c | 59 ++++++++--------------- Objects/sliceobject.c | 6 +- Objects/tupleobject.c | 6 +- Objects/typeobject.c | 18 ++---- Objects/unicodeobject.c | 9 +-- Objects/weakrefobject.c | 3 +- 24 files changed, 87 insertions(+), 162 deletions(-) diff --git a/Modules/_collectionsmodule.c b/Modules/_collectionsmodule.c --- a/Modules/_collectionsmodule.c +++ b/Modules/_collectionsmodule.c @@ -832,8 +832,7 @@ if (!PyObject_TypeCheck(v, &deque_type) || !PyObject_TypeCheck(w, &deque_type)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } /* Shortcuts */ diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -1812,8 +1812,7 @@ return diff_to_bool(diff, op); } else { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } } @@ -1911,10 +1910,8 @@ PyObject *pyus_remainder; PyObject *remainder; - if (!PyDelta_Check(left) || !PyDelta_Check(right)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if (!PyDelta_Check(left) || !PyDelta_Check(right)) + Py_RETURN_NOTIMPLEMENTED; pyus_left = delta_to_microseconds((PyDateTime_Delta *)left); if (pyus_left == NULL) @@ -1949,10 +1946,8 @@ PyObject *delta; PyObject *result; - if (!PyDelta_Check(left) || !PyDelta_Check(right)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if 
(!PyDelta_Check(left) || !PyDelta_Check(right)) + Py_RETURN_NOTIMPLEMENTED; pyus_left = delta_to_microseconds((PyDateTime_Delta *)left); if (pyus_left == NULL) @@ -2546,10 +2541,9 @@ static PyObject * date_add(PyObject *left, PyObject *right) { - if (PyDateTime_Check(left) || PyDateTime_Check(right)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if (PyDateTime_Check(left) || PyDateTime_Check(right)) + Py_RETURN_NOTIMPLEMENTED; + if (PyDate_Check(left)) { /* date + ??? */ if (PyDelta_Check(right)) @@ -2568,17 +2562,15 @@ (PyDateTime_Delta *) left, 0); } - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } static PyObject * date_subtract(PyObject *left, PyObject *right) { - if (PyDateTime_Check(left) || PyDateTime_Check(right)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if (PyDateTime_Check(left) || PyDateTime_Check(right)) + Py_RETURN_NOTIMPLEMENTED; + if (PyDate_Check(left)) { if (PyDate_Check(right)) { /* date - date */ @@ -2597,8 +2589,7 @@ 1); } } - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } @@ -2715,10 +2706,8 @@ _PyDateTime_DATE_DATASIZE); return diff_to_bool(diff, op); } - else { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + else + Py_RETURN_NOTIMPLEMENTED; } static PyObject * @@ -3215,10 +3204,8 @@ timezone_richcompare(PyDateTime_TimeZone *self, PyDateTime_TimeZone *other, int op) { - if (op != Py_EQ && op != Py_NE) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if (op != Py_EQ && op != Py_NE) + Py_RETURN_NOTIMPLEMENTED; return delta_richcompare(self->offset, other->offset, op); } @@ -3664,10 +3651,8 @@ PyObject *offset1, *offset2; int diff; - if (! PyTime_Check(other)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if (! 
PyTime_Check(other)) + Py_RETURN_NOTIMPLEMENTED; if (GET_TIME_TZINFO(self) == GET_TIME_TZINFO(other)) { diff = memcmp(((PyDateTime_Time *)self)->data, @@ -4356,8 +4341,7 @@ (PyDateTime_Delta *) left, 1); } - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } static PyObject * @@ -4559,8 +4543,7 @@ Py_RETURN_TRUE; return cmperror(self, other); } - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } if (GET_DT_TZINFO(self) == GET_DT_TZINFO(other)) { diff --git a/Modules/_sqlite/row.c b/Modules/_sqlite/row.c --- a/Modules/_sqlite/row.c +++ b/Modules/_sqlite/row.c @@ -173,10 +173,9 @@ static PyObject* pysqlite_row_richcompare(pysqlite_Row *self, PyObject *_other, int opid) { - if (opid != Py_EQ && opid != Py_NE) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if (opid != Py_EQ && opid != Py_NE) + Py_RETURN_NOTIMPLEMENTED; + if (PyType_IsSubtype(Py_TYPE(_other), &pysqlite_RowType)) { pysqlite_Row *other = (pysqlite_Row *)_other; PyObject *res = PyObject_RichCompare(self->description, other->description, opid); @@ -186,8 +185,7 @@ return PyObject_RichCompare(self->data, other->data, opid); } } - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } PyMappingMethods pysqlite_row_as_mapping = { diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c --- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -514,10 +514,8 @@ Py_ssize_t i, k; PyObject *res; - if (!array_Check(v) || !array_Check(w)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if (!array_Check(v) || !array_Check(w)) + Py_RETURN_NOTIMPLEMENTED; va = (arrayobject *)v; wa = (arrayobject *)w; diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -4925,10 +4925,9 @@ { int eq; - if ((op != Py_EQ && op != Py_NE) || Py_TYPE(other) != &cpu_set_type) { - Py_INCREF(Py_NotImplemented); - return 
Py_NotImplemented; - } + if ((op != Py_EQ && op != Py_NE) || Py_TYPE(other) != &cpu_set_type) + Py_RETURN_NOTIMPLEMENTED; + eq = set->ncpus == other->ncpus && CPU_EQUAL_S(set->size, set->set, other->set); if ((op == Py_EQ) ? eq : !eq) Py_RETURN_TRUE; @@ -4949,8 +4948,7 @@ } \ if (Py_TYPE(right) != &cpu_set_type || left->ncpus != right->ncpus) { \ Py_DECREF(res); \ - Py_INCREF(Py_NotImplemented); \ - return Py_NotImplemented; \ + Py_RETURN_NOTIMPLEMENTED; \ } \ assert(left->size == right->size && right->size == res->size); \ op(res->size, res->set, left->set, right->set); \ diff --git a/Modules/xxlimited.c b/Modules/xxlimited.c --- a/Modules/xxlimited.c +++ b/Modules/xxlimited.c @@ -187,8 +187,7 @@ static PyObject * null_richcompare(PyObject *self, PyObject *other, int op) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } static PyType_Slot Null_Type_slots[] = { diff --git a/Objects/abstract.c b/Objects/abstract.c --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -793,8 +793,7 @@ return x; Py_DECREF(x); /* can't do it */ } - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } static PyObject * diff --git a/Objects/bytearrayobject.c b/Objects/bytearrayobject.c --- a/Objects/bytearrayobject.c +++ b/Objects/bytearrayobject.c @@ -964,23 +964,20 @@ return NULL; } - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } self_size = _getbuffer(self, &self_bytes); if (self_size < 0) { PyErr_Clear(); - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } other_size = _getbuffer(other, &other_bytes); if (other_size < 0) { PyErr_Clear(); PyBuffer_Release(&self_bytes); - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } if (self_size != other_size && (op == Py_EQ || op == Py_NE)) { diff --git a/Objects/classobject.c b/Objects/classobject.c --- a/Objects/classobject.c +++ b/Objects/classobject.c 
@@ -190,8 +190,7 @@ !PyMethod_Check(self) || !PyMethod_Check(other)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } a = (PyMethodObject *)self; b = (PyMethodObject *)other; @@ -516,8 +515,7 @@ !PyInstanceMethod_Check(self) || !PyInstanceMethod_Check(other)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } a = (PyInstanceMethodObject *)self; b = (PyInstanceMethodObject *)other; diff --git a/Objects/codeobject.c b/Objects/codeobject.c --- a/Objects/codeobject.c +++ b/Objects/codeobject.c @@ -402,8 +402,7 @@ if ((op != Py_EQ && op != Py_NE) || !PyCode_Check(self) || !PyCode_Check(other)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } co = (PyCodeObject *)self; diff --git a/Objects/complexobject.c b/Objects/complexobject.c --- a/Objects/complexobject.c +++ b/Objects/complexobject.c @@ -650,8 +650,7 @@ return res; Unimplemented: - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } static PyObject * diff --git a/Objects/dictobject.c b/Objects/dictobject.c --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -2608,10 +2608,8 @@ assert(PyDictViewSet_Check(self)); assert(other != NULL); - if (!PyAnySet_Check(other) && !PyDictViewSet_Check(other)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if (!PyAnySet_Check(other) && !PyDictViewSet_Check(other)) + Py_RETURN_NOTIMPLEMENTED; len_self = PyObject_Size(self); if (len_self < 0) diff --git a/Objects/floatobject.c b/Objects/floatobject.c --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -517,8 +517,7 @@ return PyBool_FromLong(r); Unimplemented: - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } static Py_hash_t diff --git a/Objects/listobject.c b/Objects/listobject.c --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -2225,10 +2225,8 @@ PyListObject *vl, *wl; Py_ssize_t i; - if 
(!PyList_Check(v) || !PyList_Check(w)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if (!PyList_Check(v) || !PyList_Check(w)) + Py_RETURN_NOTIMPLEMENTED; vl = (PyListObject *)v; wl = (PyListObject *)w; diff --git a/Objects/longobject.c b/Objects/longobject.c --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -1382,10 +1382,8 @@ #define CHECK_BINOP(v,w) \ do { \ - if (!PyLong_Check(v) || !PyLong_Check(w)) { \ - Py_INCREF(Py_NotImplemented); \ - return Py_NotImplemented; \ - } \ + if (!PyLong_Check(v) || !PyLong_Check(w)) \ + Py_RETURN_NOTIMPLEMENTED; \ } while(0) /* bits_in_digit(d) returns the unique integer k such that 2**(k-1) <= d < @@ -3611,8 +3609,7 @@ else { Py_DECREF(a); Py_DECREF(b); - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } if (Py_SIZE(b) < 0) { /* if exponent is negative */ diff --git a/Objects/memoryobject.c b/Objects/memoryobject.c --- a/Objects/memoryobject.c +++ b/Objects/memoryobject.c @@ -773,8 +773,7 @@ _notimpl: PyBuffer_Release(&vv); PyBuffer_Release(&ww); - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } diff --git a/Objects/methodobject.c b/Objects/methodobject.c --- a/Objects/methodobject.c +++ b/Objects/methodobject.c @@ -208,8 +208,7 @@ !PyCFunction_Check(self) || !PyCFunction_Check(other)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } a = (PyCFunctionObject *)self; b = (PyCFunctionObject *)other; diff --git a/Objects/object.c b/Objects/object.c --- a/Objects/object.c +++ b/Objects/object.c @@ -1392,8 +1392,7 @@ PyErr_SetString(PyExc_TypeError, "NotImplementedType takes no arguments"); return NULL; } - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } static PyTypeObject PyNotImplemented_Type = { diff --git a/Objects/setobject.c b/Objects/setobject.c --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -1212,10 +1212,8 @@ { PySetObject 
*result; - if (!PyAnySet_Check(so) || !PyAnySet_Check(other)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if (!PyAnySet_Check(so) || !PyAnySet_Check(other)) + Py_RETURN_NOTIMPLEMENTED; result = (PySetObject *)set_copy(so); if (result == NULL) @@ -1232,10 +1230,9 @@ static PyObject * set_ior(PySetObject *so, PyObject *other) { - if (!PyAnySet_Check(other)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if (!PyAnySet_Check(other)) + Py_RETURN_NOTIMPLEMENTED; + if (set_update_internal(so, other) == -1) return NULL; Py_INCREF(so); @@ -1385,10 +1382,8 @@ static PyObject * set_and(PySetObject *so, PyObject *other) { - if (!PyAnySet_Check(so) || !PyAnySet_Check(other)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if (!PyAnySet_Check(so) || !PyAnySet_Check(other)) + Py_RETURN_NOTIMPLEMENTED; return set_intersection(so, other); } @@ -1397,10 +1392,8 @@ { PyObject *result; - if (!PyAnySet_Check(other)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if (!PyAnySet_Check(other)) + Py_RETURN_NOTIMPLEMENTED; result = set_intersection_update(so, other); if (result == NULL) return NULL; @@ -1627,20 +1620,16 @@ static PyObject * set_sub(PySetObject *so, PyObject *other) { - if (!PyAnySet_Check(so) || !PyAnySet_Check(other)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if (!PyAnySet_Check(so) || !PyAnySet_Check(other)) + Py_RETURN_NOTIMPLEMENTED; return set_difference(so, other); } static PyObject * set_isub(PySetObject *so, PyObject *other) { - if (!PyAnySet_Check(other)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if (!PyAnySet_Check(other)) + Py_RETURN_NOTIMPLEMENTED; if (set_difference_update_internal(so, other) == -1) return NULL; Py_INCREF(so); @@ -1738,10 +1727,8 @@ static PyObject * set_xor(PySetObject *so, PyObject *other) { - if (!PyAnySet_Check(so) || !PyAnySet_Check(other)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } 
+ if (!PyAnySet_Check(so) || !PyAnySet_Check(other)) + Py_RETURN_NOTIMPLEMENTED; return set_symmetric_difference(so, other); } @@ -1750,10 +1737,8 @@ { PyObject *result; - if (!PyAnySet_Check(other)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if (!PyAnySet_Check(other)) + Py_RETURN_NOTIMPLEMENTED; result = set_symmetric_difference_update(so, other); if (result == NULL) return NULL; @@ -1815,10 +1800,9 @@ { PyObject *r1, *r2; - if(!PyAnySet_Check(w)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if(!PyAnySet_Check(w)) + Py_RETURN_NOTIMPLEMENTED; + switch (op) { case Py_EQ: if (PySet_GET_SIZE(v) != PySet_GET_SIZE(w)) @@ -1848,8 +1832,7 @@ Py_RETURN_FALSE; return set_issuperset(v, w); } - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } static PyObject * diff --git a/Objects/sliceobject.c b/Objects/sliceobject.c --- a/Objects/sliceobject.c +++ b/Objects/sliceobject.c @@ -326,10 +326,8 @@ PyObject *t2; PyObject *res; - if (!PySlice_Check(v) || !PySlice_Check(w)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if (!PySlice_Check(v) || !PySlice_Check(w)) + Py_RETURN_NOTIMPLEMENTED; if (v == w) { /* XXX Do we really need this shortcut? 
diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -546,10 +546,8 @@ Py_ssize_t i; Py_ssize_t vlen, wlen; - if (!PyTuple_Check(v) || !PyTuple_Check(w)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if (!PyTuple_Check(v) || !PyTuple_Check(w)) + Py_RETURN_NOTIMPLEMENTED; vt = (PyTupleObject *)v; wt = (PyTupleObject *)w; diff --git a/Objects/typeobject.c b/Objects/typeobject.c --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -1212,10 +1212,8 @@ func = lookup_maybe(o, name, nameobj); if (func == NULL) { va_end(va); - if (!PyErr_Occurred()) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if (!PyErr_Occurred()) + Py_RETURN_NOTIMPLEMENTED; return NULL; } @@ -3449,8 +3447,7 @@ static PyObject * object_subclasshook(PyObject *cls, PyObject *args) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } PyDoc_STRVAR(object_subclasshook_doc, @@ -4818,8 +4815,7 @@ return call_maybe( \ other, ROPSTR, &rcache_str, "(O)", self); \ } \ - Py_INCREF(Py_NotImplemented); \ - return Py_NotImplemented; \ + Py_RETURN_NOTIMPLEMENTED; \ } #define SLOT1BIN(FUNCNAME, SLOTNAME, OPSTR, ROPSTR) \ @@ -4996,8 +4992,7 @@ return call_method(self, "__pow__", &pow_str, "(OO)", other, modulus); } - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } SLOT0(slot_nb_negative, "__neg__") @@ -5320,8 +5315,7 @@ func = lookup_method(self, name_op[op], &op_str[op]); if (func == NULL) { PyErr_Clear(); - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } args = PyTuple_Pack(1, other); if (args == NULL) diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -7417,8 +7417,7 @@ return v; } - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } int @@ -9291,10 +9290,8 @@ static PyObject * 
unicode_mod(PyObject *v, PyObject *w) { - if (!PyUnicode_Check(v)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } + if (!PyUnicode_Check(v)) + Py_RETURN_NOTIMPLEMENTED; return PyUnicode_Format(v, w); } diff --git a/Objects/weakrefobject.c b/Objects/weakrefobject.c --- a/Objects/weakrefobject.c +++ b/Objects/weakrefobject.c @@ -193,8 +193,7 @@ if ((op != Py_EQ && op != Py_NE) || !PyWeakref_Check(self) || !PyWeakref_Check(other)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + Py_RETURN_NOTIMPLEMENTED; } if (PyWeakref_GET_OBJECT(self) == Py_None || PyWeakref_GET_OBJECT(other) == Py_None) { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 11 03:34:39 2011 From: python-checkins at python.org (brian.curtin) Date: Thu, 11 Aug 2011 03:34:39 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_News_item_for_=2312724?= Message-ID: http://hg.python.org/cpython/rev/3a6782f2a4a8 changeset: 71811:3a6782f2a4a8 user: Brian Curtin date: Wed Aug 10 20:32:10 2011 -0500 summary: News item for #12724 files: Misc/NEWS | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -1345,6 +1345,8 @@ C-API ----- +- Issue #12724: Add Py_RETURN_NOTIMPLEMENTED macro for returning NotImplemented. + - PY_PATCHLEVEL_REVISION has been removed, since it's meaningless with Mercurial. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 11 03:34:40 2011 From: python-checkins at python.org (brian.curtin) Date: Thu, 11 Aug 2011 03:34:40 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_merge?= Message-ID: http://hg.python.org/cpython/rev/5b2455d6cfe9 changeset: 71812:5b2455d6cfe9 parent: 71811:3a6782f2a4a8 parent: 71808:fdfd1d67d9fb user: Brian Curtin date: Wed Aug 10 20:34:20 2011 -0500 summary: merge files: Lib/calendar.py | 11 +++++++---- Lib/test/test_calendar.py | 7 +++++++ Misc/NEWS | 2 ++ 3 files changed, 16 insertions(+), 4 deletions(-) diff --git a/Lib/calendar.py b/Lib/calendar.py --- a/Lib/calendar.py +++ b/Lib/calendar.py @@ -636,7 +636,7 @@ parser.add_option( "-e", "--encoding", dest="encoding", default=None, - help="Encoding to use for output" + help="Encoding to use for output." ) parser.add_option( "-t", "--type", @@ -662,10 +662,11 @@ if encoding is None: encoding = sys.getdefaultencoding() optdict = dict(encoding=encoding, css=options.css) + write = sys.stdout.buffer.write if len(args) == 1: - print(cal.formatyearpage(datetime.date.today().year, **optdict)) + write(cal.formatyearpage(datetime.date.today().year, **optdict)) elif len(args) == 2: - print(cal.formatyearpage(int(args[1]), **optdict)) + write(cal.formatyearpage(int(args[1]), **optdict)) else: parser.error("incorrect number of arguments") sys.exit(1) @@ -687,9 +688,11 @@ else: parser.error("incorrect number of arguments") sys.exit(1) + write = sys.stdout.write if options.encoding: result = result.encode(options.encoding) - print(result) + write = sys.stdout.buffer.write + write(result) if __name__ == "__main__": diff --git a/Lib/test/test_calendar.py b/Lib/test/test_calendar.py --- a/Lib/test/test_calendar.py +++ b/Lib/test/test_calendar.py @@ -2,6 +2,7 @@ import unittest from test import support +from test.script_helper import assert_python_ok import time import locale @@ -451,6 
+452,11 @@ self.assertEqual(calendar.leapdays(1997,2020), 5) +class ConsoleOutputTestCase(unittest.TestCase): + def test_outputs_bytes(self): + (return_code, stdout, stderr) = assert_python_ok('-m', 'calendar', '--type=html', '2010') + self.assertEqual(stdout[:6], b' results for fdfd1d67d9fb on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/refloguzor1v', '-x'] From python-checkins at python.org Thu Aug 11 14:47:07 2011 From: python-checkins at python.org (vinay.sajip) Date: Thu, 11 Aug 2011 14:47:07 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzEyNzE4?= =?utf8?q?=3A_Add_documentation_on_using_custom_importers=2E?= Message-ID: http://hg.python.org/cpython/rev/0fbd44e3f342 changeset: 71813:0fbd44e3f342 branch: 2.7 parent: 71805:d5b274a0b0a5 user: Vinay Sajip date: Thu Aug 11 13:39:52 2011 +0100 summary: Issue #12718: Add documentation on using custom importers. files: Doc/library/logging.config.rst | 25 ++++++++++++++++++++++ 1 files changed, 25 insertions(+), 0 deletions(-) diff --git a/Doc/library/logging.config.rst b/Doc/library/logging.config.rst --- a/Doc/library/logging.config.rst +++ b/Doc/library/logging.config.rst @@ -516,6 +516,31 @@ to ``config_dict['handlers']['myhandler']['mykey']['123']`` if that fails. + +.. _logging-import-resolution: + +Import resolution and custom importers +"""""""""""""""""""""""""""""""""""""" + +Import resolution, by default, uses the builtin :func:`__import__` function +to do its importing. You may want to replace this with your own importing +mechanism: if so, you can replace the :attr:`importer` attribute of the +:class:`DictConfigurator` or its superclass, the +:class:`BaseConfigurator` class. However, you need to be +careful because of the way functions are accessed from classes via +descriptors. 
If you are using a Python callable to do your imports, and you +want to define it at class level rather than instance level, you need to wrap +it with :func:`staticmethod`. For example:: + + from importlib import import_module + from logging.config import BaseConfigurator + + BaseConfigurator.importer = staticmethod(import_module) + +You don't need to wrap with :func:`staticmethod` if you're setting the import +callable on a configurator *instance*. + + .. _logging-config-fileformat: Configuration file format -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 11 14:47:08 2011 From: python-checkins at python.org (vinay.sajip) Date: Thu, 11 Aug 2011 14:47:08 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzEyNzE4?= =?utf8?q?=3A_Add_documentation_on_using_custom_importers=2E?= Message-ID: http://hg.python.org/cpython/rev/1e96a4406565 changeset: 71814:1e96a4406565 branch: 3.2 parent: 71807:23316468ed4f user: Vinay Sajip date: Thu Aug 11 13:45:48 2011 +0100 summary: Issue #12718: Add documentation on using custom importers. files: Doc/library/logging.config.rst | 25 ++++++++++++++++++++++ 1 files changed, 25 insertions(+), 0 deletions(-) diff --git a/Doc/library/logging.config.rst b/Doc/library/logging.config.rst --- a/Doc/library/logging.config.rst +++ b/Doc/library/logging.config.rst @@ -513,6 +513,31 @@ to ``config_dict['handlers']['myhandler']['mykey']['123']`` if that fails. + +.. _logging-import-resolution: + +Import resolution and custom importers +"""""""""""""""""""""""""""""""""""""" + +Import resolution, by default, uses the builtin :func:`__import__` function +to do its importing. You may want to replace this with your own importing +mechanism: if so, you can replace the :attr:`importer` attribute of the +:class:`DictConfigurator` or its superclass, the +:class:`BaseConfigurator` class. However, you need to be +careful because of the way functions are accessed from classes via +descriptors. 
If you are using a Python callable to do your imports, and you +want to define it at class level rather than instance level, you need to wrap +it with :func:`staticmethod`. For example:: + + from importlib import import_module + from logging.config import BaseConfigurator + + BaseConfigurator.importer = staticmethod(import_module) + +You don't need to wrap with :func:`staticmethod` if you're setting the import +callable on a configurator *instance*. + + .. _logging-config-fileformat: Configuration file format -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 11 14:47:09 2011 From: python-checkins at python.org (vinay.sajip) Date: Thu, 11 Aug 2011 14:47:09 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Closes_=2312718=3A_Merge_documentation_fix_from_3=2E2=2E?= Message-ID: http://hg.python.org/cpython/rev/76964d70c81c changeset: 71815:76964d70c81c parent: 71812:5b2455d6cfe9 parent: 71814:1e96a4406565 user: Vinay Sajip date: Thu Aug 11 13:46:54 2011 +0100 summary: Closes #12718: Merge documentation fix from 3.2. files: Doc/library/logging.config.rst | 25 ++++++++++++++++++++++ 1 files changed, 25 insertions(+), 0 deletions(-) diff --git a/Doc/library/logging.config.rst b/Doc/library/logging.config.rst --- a/Doc/library/logging.config.rst +++ b/Doc/library/logging.config.rst @@ -513,6 +513,31 @@ to ``config_dict['handlers']['myhandler']['mykey']['123']`` if that fails. + +.. _logging-import-resolution: + +Import resolution and custom importers +"""""""""""""""""""""""""""""""""""""" + +Import resolution, by default, uses the builtin :func:`__import__` function +to do its importing. You may want to replace this with your own importing +mechanism: if so, you can replace the :attr:`importer` attribute of the +:class:`DictConfigurator` or its superclass, the +:class:`BaseConfigurator` class. 
However, you need to be +careful because of the way functions are accessed from classes via +descriptors. If you are using a Python callable to do your imports, and you +want to define it at class level rather than instance level, you need to wrap +it with :func:`staticmethod`. For example:: + + from importlib import import_module + from logging.config import BaseConfigurator + + BaseConfigurator.importer = staticmethod(import_module) + +You don't need to wrap with :func:`staticmethod` if you're setting the import +callable on a configurator *instance*. + + .. _logging-config-fileformat: Configuration file format -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 11 16:41:53 2011 From: python-checkins at python.org (brian.curtin) Date: Thu, 11 Aug 2011 16:41:53 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_doc_for_Py=5FRETURN=5FN?= =?utf8?q?OTIMPLEMENTED=2C_added_in_=2312724=2E?= Message-ID: http://hg.python.org/cpython/rev/e88362fb4950 changeset: 71816:e88362fb4950 user: Brian Curtin date: Thu Aug 11 09:41:31 2011 -0500 summary: Add doc for Py_RETURN_NOTIMPLEMENTED, added in #12724. files: Doc/c-api/object.rst | 13 +++++++++++++ 1 files changed, 13 insertions(+), 0 deletions(-) diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst --- a/Doc/c-api/object.rst +++ b/Doc/c-api/object.rst @@ -6,6 +6,19 @@ =============== +.. c:var:: PyObject* Py_NotImplemented + + The ``NotImplemented`` singleton, used to signal that an operation is + not implemented for the given type combination. + + +.. c:macro:: Py_RETURN_NOTIMPLEMENTED + + Properly handle returning :c:data:`Py_NotImplemented` from within a C + function (that is, increment the reference count of NotImplemented and + return it). + + .. c:function:: int PyObject_Print(PyObject *o, FILE *fp, int flags) Print an object *o*, on file *fp*. Returns ``-1`` on error. 
The flags argument -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 11 20:12:39 2011 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 11 Aug 2011 20:12:39 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Add_draft_for_PEP_3154=2C_=22P?= =?utf8?q?ickle_protocol_version_4=22?= Message-ID: http://hg.python.org/peps/rev/074a90b5bcbf changeset: 3920:074a90b5bcbf user: Antoine Pitrou date: Thu Aug 11 20:10:41 2011 +0200 summary: Add draft for PEP 3154, "Pickle protocol version 4" files: pep-3154.txt | 107 +++++++++++++++++++++++++++++++++++++++ 1 files changed, 107 insertions(+), 0 deletions(-) diff --git a/pep-3154.txt b/pep-3154.txt new file mode 100644 --- /dev/null +++ b/pep-3154.txt @@ -0,0 +1,107 @@ +PEP: 3154 +Title: Pickle protocol version 4 +Version: $Revision$ +Last-Modified: $Date$ +Author: Antoine Pitrou +Status: Draft +Type: Standards Track +Content-Type: text/x-rst +Created: 2011-08-11 +Python-Version: 3.3 +Post-History: +Resolution: TBD + + +Abstract +======== + +Data serialized using the pickle module must be portable accross Python +versions. It should also support the latest language features as well as +implementation-specific features. For this reason, the pickle module knows +about several protocols (currently numbered from 0 to 3), each of which +appeared in a different Python version. Using a low-numbered protocol +version allows to exchange data with old Python versions, while using a +high-numbered protocol allows access to newer features and sometimes more +efficient resource use (both CPU time required for (de)serializing, and +disk size / network bandwidth required for data transfer). + + +Rationale +========= + +The latest current protocol, coincidentally named protocol 3, appeared with +Python 3.0 and supports the new incompatible features in the language +(mainly, unicode strings by default and the new bytes object). 
The +opportunity was not taken at the time to improve the protocol in other ways. + +This PEP is an attempt to foster a number of small incremental improvements +in a future new protocol version. The PEP process is used in order to gather +as many improvements as possible, because the introduction of a new protocol +version should be a rare occurrence. + + +Improvements in discussion +========================== + +64-bit compatibility for large objects +-------------------------------------- + +Current protocol versions export object sizes for various built-in types +(str, bytes) as 32-bit ints. This forbids serialization of large data [1]_. +New opcodes are required to support very large bytes and str objects. + +Native opcodes for sets and frozensets +-------------------------------------- + +Many common built-in types (such as str, bytes, dict, list, tuple) have +dedicated opcodes to improve resource consumption when serializing and +deserializing them; however, sets and frozensets don't. Adding such opcodes +would be an obvious improvements. Also, dedicated set support could help +remove the current impossibility of pickling self-referential sets +[2]_. + +Binary encoding for all opcodes +------------------------------- + +The GLOBAL opcode, which is still used in protocol 3, uses the so-called +"text" mode of the pickle protocol, which involves looking for newlines +in the pickle stream. Looking for newlines is difficult to optimize on +a non-seekable stream, and therefore a new version of GLOBAL (BINGLOBAL?) +could use a binary encoding instead. + +It seems that all other opcodes emitted when using protocol 3 already use +binary encoding. + + + +Acknowledgments +=============== + +(...) + + +References +========== + +.. [1] "pickle not 64-bit ready": + http://bugs.python.org/issue11564 + +.. [2] "Cannot pickle self-referencing sets": + http://bugs.python.org/issue9269 + + +Copyright +========= + +This document has been placed in the public domain. + + + +.. 
+ Local Variables: + mode: indented-text + indent-tabs-mode: nil + sentence-end-double-space: t + fill-column: 70 + coding: utf-8 + End: -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Thu Aug 11 20:29:34 2011 From: python-checkins at python.org (phillip.eby) Date: Thu, 11 Aug 2011 20:29:34 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Add_Eric_Araujo=27s_suggestion?= =?utf8?q?s=2C_plus_typo_fix=2E?= Message-ID: http://hg.python.org/peps/rev/0d53159dc0ae changeset: 3921:0d53159dc0ae user: pje date: Thu Aug 11 14:29:06 2011 -0400 summary: Add Eric Araujo's suggestions, plus typo fix. files: pep-0402.txt | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pep-0402.txt b/pep-0402.txt old mode 100644 new mode 100755 --- a/pep-0402.txt +++ b/pep-0402.txt @@ -48,7 +48,7 @@ When new users come to Python from other languages, they are often -confused by Python's packaging semantics. At Google, for example, +confused by Python's package import semantics. At Google, for example, Guido received complaints from "a large crowd with pitchforks" [2]_ that the requirement for packages to contain an ``__init__`` module was a "misfeature", and should be dropped. @@ -434,7 +434,7 @@ ``import foo.bar.baz`` must wait until ``foo.bar.baz`` is found before creating module objects for *both* ``foo`` and ``foo.bar``, and then create both of them together, properly setting the ``foo`` module's -``.bar`` attrbute to point to the ``foo.bar`` module. +``.bar`` attribute to point to the ``foo.bar`` module. In this way, pure virtual packages are never directly importable: an ``import foo`` or ``import foo.bar`` by itself will fail, and the @@ -603,7 +603,7 @@ accidentally work. Is that good or bad? 
-For those implementing PEP \302 importer objects: +For those implementing PEP 302 importer objects: * Importers that support the ``iter_modules()`` method (used by ``pkgutil`` to locate importable modules and packages) and want to -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Thu Aug 11 20:46:37 2011 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 11 Aug 2011 20:46:37 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Typo?= Message-ID: http://hg.python.org/peps/rev/120578926442 changeset: 3922:120578926442 parent: 3920:074a90b5bcbf user: Antoine Pitrou date: Thu Aug 11 20:34:07 2011 +0200 summary: Typo files: pep-3154.txt | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/pep-3154.txt b/pep-3154.txt --- a/pep-3154.txt +++ b/pep-3154.txt @@ -56,7 +56,7 @@ Many common built-in types (such as str, bytes, dict, list, tuple) have dedicated opcodes to improve resource consumption when serializing and deserializing them; however, sets and frozensets don't. Adding such opcodes -would be an obvious improvements. Also, dedicated set support could help +would be an obvious improvement. Also, dedicated set support could help remove the current impossibility of pickling self-referential sets [2]_. 
-- Repository URL: http://hg.python.org/peps From python-checkins at python.org Thu Aug 11 20:46:38 2011 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 11 Aug 2011 20:46:38 +0200 Subject: [Python-checkins] =?utf8?q?peps_=28merge_default_-=3E_default=29?= =?utf8?q?=3A_Merge?= Message-ID: http://hg.python.org/peps/rev/3e75f2c9903b changeset: 3923:3e75f2c9903b parent: 3922:120578926442 parent: 3921:0d53159dc0ae user: Antoine Pitrou date: Thu Aug 11 20:44:34 2011 +0200 summary: Merge files: pep-0402.txt | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pep-0402.txt b/pep-0402.txt old mode 100644 new mode 100755 --- a/pep-0402.txt +++ b/pep-0402.txt @@ -48,7 +48,7 @@ When new users come to Python from other languages, they are often -confused by Python's packaging semantics. At Google, for example, +confused by Python's package import semantics. At Google, for example, Guido received complaints from "a large crowd with pitchforks" [2]_ that the requirement for packages to contain an ``__init__`` module was a "misfeature", and should be dropped. @@ -434,7 +434,7 @@ ``import foo.bar.baz`` must wait until ``foo.bar.baz`` is found before creating module objects for *both* ``foo`` and ``foo.bar``, and then create both of them together, properly setting the ``foo`` module's -``.bar`` attrbute to point to the ``foo.bar`` module. +``.bar`` attribute to point to the ``foo.bar`` module. In this way, pure virtual packages are never directly importable: an ``import foo`` or ``import foo.bar`` by itself will fail, and the @@ -603,7 +603,7 @@ accidentally work. Is that good or bad? 
-For those implementing PEP \302 importer objects: +For those implementing PEP 302 importer objects: * Importers that support the ``iter_modules()`` method (used by ``pkgutil`` to locate importable modules and packages) and want to -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Thu Aug 11 21:17:54 2011 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 11 Aug 2011 21:17:54 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzEyNjg3?= =?utf8?q?=3A_Fix_a_possible_buffering_bug_when_unpickling_text_mode_=28pr?= =?utf8?q?otocol?= Message-ID: http://hg.python.org/cpython/rev/c47bc1349e61 changeset: 71817:c47bc1349e61 branch: 3.2 parent: 71814:1e96a4406565 user: Antoine Pitrou date: Thu Aug 11 21:04:02 2011 +0200 summary: Issue #12687: Fix a possible buffering bug when unpickling text mode (protocol 0, mostly) pickles. files: Lib/test/pickletester.py | 13 +++++++++++++ Misc/NEWS | 3 +++ Modules/_pickle.c | 3 +-- 3 files changed, 17 insertions(+), 2 deletions(-) diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py --- a/Lib/test/pickletester.py +++ b/Lib/test/pickletester.py @@ -1418,6 +1418,19 @@ def test_multiple_unpicklings_unseekable(self): self._check_multiple_unpicklings(UnseekableIO) + def test_unpickling_buffering_readline(self): + # Issue #12687: the unpickler's buffering logic could fail with + # text mode opcodes. 
+ data = list(range(10)) + for proto in protocols: + for buf_size in range(1, 11): + f = io.BufferedRandom(io.BytesIO(), buffer_size=buf_size) + pickler = self.pickler_class(f, protocol=proto) + pickler.dump(data) + f.seek(0) + unpickler = self.unpickler_class(f) + self.assertEqual(unpickler.load(), data) + if __name__ == "__main__": # Print some stuff that can be used to rewrite DATA{0,1,2} diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -41,6 +41,9 @@ Library ------- +- Issue #12687: Fix a possible buffering bug when unpickling text mode + (protocol 0, mostly) pickles. + - Issue #10087: Fix the html output format of the calendar module. - Issue #12540: Prevent zombie IDLE processes on Windows due to changes diff --git a/Modules/_pickle.c b/Modules/_pickle.c --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -1034,9 +1034,8 @@ num_read = _Unpickler_ReadFromFile(self, READ_WHOLE_LINE); if (num_read < 0) return -1; - *result = self->input_buffer; self->next_read_idx = num_read; - return num_read; + return _Unpickler_CopyLine(self, self->input_buffer, num_read, result); } /* If we get here, we've run off the end of the input string. Return the -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 11 21:17:55 2011 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 11 Aug 2011 21:17:55 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Issue_=2312687=3A_Fix_a_possible_buffering_bug_when_unpickli?= =?utf8?q?ng_text_mode_=28protocol?= Message-ID: http://hg.python.org/cpython/rev/6aa822071f4e changeset: 71818:6aa822071f4e parent: 71816:e88362fb4950 parent: 71817:c47bc1349e61 user: Antoine Pitrou date: Thu Aug 11 21:15:53 2011 +0200 summary: Issue #12687: Fix a possible buffering bug when unpickling text mode (protocol 0, mostly) pickles. 
files: Lib/test/pickletester.py | 13 +++++++++++++ Misc/NEWS | 3 +++ Modules/_pickle.c | 3 +-- 3 files changed, 17 insertions(+), 2 deletions(-) diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py --- a/Lib/test/pickletester.py +++ b/Lib/test/pickletester.py @@ -1438,6 +1438,19 @@ def test_multiple_unpicklings_unseekable(self): self._check_multiple_unpicklings(UnseekableIO) + def test_unpickling_buffering_readline(self): + # Issue #12687: the unpickler's buffering logic could fail with + # text mode opcodes. + data = list(range(10)) + for proto in protocols: + for buf_size in range(1, 11): + f = io.BufferedRandom(io.BytesIO(), buffer_size=buf_size) + pickler = self.pickler_class(f, protocol=proto) + pickler.dump(data) + f.seek(0) + unpickler = self.unpickler_class(f) + self.assertEqual(unpickler.load(), data) + if __name__ == "__main__": # Print some stuff that can be used to rewrite DATA{0,1,2} diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -251,6 +251,9 @@ Library ------- +- Issue #12687: Fix a possible buffering bug when unpickling text mode + (protocol 0, mostly) pickles. + - Issue #10087: Fix the html output format of the calendar module. - Issue #12540: Prevent zombie IDLE processes on Windows due to changes diff --git a/Modules/_pickle.c b/Modules/_pickle.c --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -1034,9 +1034,8 @@ num_read = _Unpickler_ReadFromFile(self, READ_WHOLE_LINE); if (num_read < 0) return -1; - *result = self->input_buffer; self->next_read_idx = num_read; - return num_read; + return _Unpickler_CopyLine(self, self->input_buffer, num_read, result); } /* If we get here, we've run off the end of the input string. 
Return the -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 11 21:46:57 2011 From: python-checkins at python.org (benjamin.peterson) Date: Thu, 11 Aug 2011 21:46:57 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_revert_code_which_condition?= =?utf8?q?ally_writes_Python-ast=2Eh_=28closes_=2312727=29?= Message-ID: http://hg.python.org/cpython/rev/5e005773feaa changeset: 71819:5e005773feaa parent: 71815:76964d70c81c user: Benjamin Peterson date: Thu Aug 11 14:42:28 2011 -0500 summary: revert code which conditionally writes Python-ast.h (closes #12727) files: Parser/asdl_c.py | 26 +++----------------------- 1 files changed, 3 insertions(+), 23 deletions(-) diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -4,10 +4,7 @@ # TO DO # handle fields that have a type but no name -import errno -import os -import sys -import StringIO +import os, sys import subprocess import asdl @@ -1158,7 +1155,7 @@ sys.exit(1) if INC_DIR: p = "%s/%s-ast.h" % (INC_DIR, mod.name) - f = StringIO.StringIO() + f = open(p, "w") f.write(auto_gen_msg) f.write('#include "asdl.h"\n\n') c = ChainOfVisitors(TypeDefVisitor(f), @@ -1169,24 +1166,7 @@ f.write("PyObject* PyAST_mod2obj(mod_ty t);\n") f.write("mod_ty PyAST_obj2mod(PyObject* ast, PyArena* arena, int mode);\n") f.write("int PyAST_Check(PyObject* obj);\n") - s = f.getvalue() - write = True - try: - fp = open(p, "r") - except IOError, e: - if e.errno != errno.ENOENT: - raise - else: - try: - write = fp.read() != s - finally: - fp.close() - if write: - fp = open(p, "w") - try: - fp.write(s) - finally: - fp.close() + f.close() if SRC_DIR: p = os.path.join(SRC_DIR, str(mod.name) + "-ast.c") -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 11 21:46:58 2011 From: python-checkins at python.org (benjamin.peterson) Date: Thu, 11 Aug 2011 21:46:58 +0200 Subject: [Python-checkins] 
=?utf8?q?cpython=3A_make_only_files_which_use_A?= =?utf8?q?ST_depend_on_Python-ast=2Eh?= Message-ID: http://hg.python.org/cpython/rev/2c116e3105fd changeset: 71820:2c116e3105fd user: Benjamin Peterson date: Thu Aug 11 14:46:26 2011 -0500 summary: make only files which use AST depend on Python-ast.h files: Makefile.pre.in | 1 - 1 files changed, 0 insertions(+), 1 deletions(-) diff --git a/Makefile.pre.in b/Makefile.pre.in --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -658,7 +658,6 @@ # Header files PYTHON_HEADERS= \ - Include/Python-ast.h \ Include/Python.h \ Include/abstract.h \ Include/asdl.h \ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 11 21:46:59 2011 From: python-checkins at python.org (benjamin.peterson) Date: Thu, 11 Aug 2011 21:46:59 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_merge_heads?= Message-ID: http://hg.python.org/cpython/rev/f44acad741a4 changeset: 71821:f44acad741a4 parent: 71820:2c116e3105fd parent: 71818:6aa822071f4e user: Benjamin Peterson date: Thu Aug 11 14:46:49 2011 -0500 summary: merge heads files: Doc/c-api/object.rst | 13 +++++++++++++ Lib/test/pickletester.py | 13 +++++++++++++ Misc/NEWS | 3 +++ Modules/_pickle.c | 3 +-- 4 files changed, 30 insertions(+), 2 deletions(-) diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst --- a/Doc/c-api/object.rst +++ b/Doc/c-api/object.rst @@ -6,6 +6,19 @@ =============== +.. c:var:: PyObject* Py_NotImplemented + + The ``NotImplemented`` singleton, used to signal that an operation is + not implemented for the given type combination. + + +.. c:macro:: Py_RETURN_NOTIMPLEMENTED + + Properly handle returning :c:data:`Py_NotImplemented` from within a C + function (that is, increment the reference count of NotImplemented and + return it). + + .. c:function:: int PyObject_Print(PyObject *o, FILE *fp, int flags) Print an object *o*, on file *fp*. Returns ``-1`` on error. 
The flags argument diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py --- a/Lib/test/pickletester.py +++ b/Lib/test/pickletester.py @@ -1438,6 +1438,19 @@ def test_multiple_unpicklings_unseekable(self): self._check_multiple_unpicklings(UnseekableIO) + def test_unpickling_buffering_readline(self): + # Issue #12687: the unpickler's buffering logic could fail with + # text mode opcodes. + data = list(range(10)) + for proto in protocols: + for buf_size in range(1, 11): + f = io.BufferedRandom(io.BytesIO(), buffer_size=buf_size) + pickler = self.pickler_class(f, protocol=proto) + pickler.dump(data) + f.seek(0) + unpickler = self.unpickler_class(f) + self.assertEqual(unpickler.load(), data) + if __name__ == "__main__": # Print some stuff that can be used to rewrite DATA{0,1,2} diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -251,6 +251,9 @@ Library ------- +- Issue #12687: Fix a possible buffering bug when unpickling text mode + (protocol 0, mostly) pickles. + - Issue #10087: Fix the html output format of the calendar module. - Issue #12540: Prevent zombie IDLE processes on Windows due to changes diff --git a/Modules/_pickle.c b/Modules/_pickle.c --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -1034,9 +1034,8 @@ num_read = _Unpickler_ReadFromFile(self, READ_WHOLE_LINE); if (num_read < 0) return -1; - *result = self->input_buffer; self->next_read_idx = num_read; - return num_read; + return _Unpickler_CopyLine(self, self->input_buffer, num_read, result); } /* If we get here, we've run off the end of the input string. 
Return the -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Fri Aug 12 05:26:15 2011 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Fri, 12 Aug 2011 05:26:15 +0200 Subject: [Python-checkins] Daily reference leaks (f44acad741a4): sum=0 Message-ID: results for f44acad741a4 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflog5NhUeA', '-x'] From python-checkins at python.org Fri Aug 12 10:40:59 2011 From: python-checkins at python.org (eli.bendersky) Date: Fri, 12 Aug 2011 10:40:59 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2312672=3A_fix_code_?= =?utf8?q?samples_in_extending/newtypes=2Ehtml_for_PEP-7_compliance?= Message-ID: http://hg.python.org/cpython/rev/683202530137 changeset: 71822:683202530137 user: Eli Bendersky date: Fri Aug 12 11:40:39 2011 +0300 summary: Issue #12672: fix code samples in extending/newtypes.html for PEP-7 compliance files: Doc/extending/newtypes.rst | 10 +- Doc/includes/noddy2.c | 34 ++++---- Doc/includes/noddy3.c | 98 ++++++++++++------------- Doc/includes/noddy4.c | 32 +++---- 4 files changed, 83 insertions(+), 91 deletions(-) diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst --- a/Doc/extending/newtypes.rst +++ b/Doc/extending/newtypes.rst @@ -289,18 +289,16 @@ self = (Noddy *)type->tp_alloc(type, 0); if (self != NULL) { self->first = PyString_FromString(""); - if (self->first == NULL) - { + if (self->first == NULL) { Py_DECREF(self); return NULL; - } + } self->last = PyString_FromString(""); - if (self->last == NULL) - { + if (self->last == NULL) { Py_DECREF(self); return NULL; - } + } self->number = 0; } diff --git a/Doc/includes/noddy2.c b/Doc/includes/noddy2.c --- a/Doc/includes/noddy2.c +++ b/Doc/includes/noddy2.c @@ -24,18 +24,16 @@ self = (Noddy *)type->tp_alloc(type, 0); if (self != NULL) { self->first = PyUnicode_FromString(""); - if 
(self->first == NULL) - { + if (self->first == NULL) { Py_DECREF(self); return NULL; - } - + } + self->last = PyUnicode_FromString(""); - if (self->last == NULL) - { + if (self->last == NULL) { Py_DECREF(self); return NULL; - } + } self->number = 0; } @@ -50,10 +48,10 @@ static char *kwlist[] = {"first", "last", "number", NULL}; - if (! PyArg_ParseTupleAndKeywords(args, kwds, "|OOi", kwlist, - &first, &last, + if (! PyArg_ParseTupleAndKeywords(args, kwds, "|OOi", kwlist, + &first, &last, &self->number)) - return -1; + return -1; if (first) { tmp = self->first; @@ -111,7 +109,7 @@ result = PyUnicode_Format(format, args); Py_DECREF(args); - + return result; } @@ -145,12 +143,12 @@ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ "Noddy objects", /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ Noddy_methods, /* tp_methods */ Noddy_members, /* tp_members */ 0, /* tp_getset */ @@ -173,7 +171,7 @@ }; PyMODINIT_FUNC -PyInit_noddy2(void) +PyInit_noddy2(void) { PyObject* m; diff --git a/Doc/includes/noddy3.c b/Doc/includes/noddy3.c --- a/Doc/includes/noddy3.c +++ b/Doc/includes/noddy3.c @@ -24,18 +24,16 @@ self = (Noddy *)type->tp_alloc(type, 0); if (self != NULL) { self->first = PyUnicode_FromString(""); - if (self->first == NULL) - { + if (self->first == NULL) { Py_DECREF(self); return NULL; - } - + } + self->last = PyUnicode_FromString(""); - if (self->last == NULL) - { + if (self->last == NULL) { Py_DECREF(self); return NULL; - } + } self->number = 0; } @@ -50,10 +48,10 @@ static char *kwlist[] = {"first", "last", "number", NULL}; - if (! PyArg_ParseTupleAndKeywords(args, kwds, "|SSi", kwlist, - &first, &last, + if (! 
PyArg_ParseTupleAndKeywords(args, kwds, "|SSi", kwlist, + &first, &last, &self->number)) - return -1; + return -1; if (first) { tmp = self->first; @@ -88,22 +86,22 @@ static int Noddy_setfirst(Noddy *self, PyObject *value, void *closure) { - if (value == NULL) { - PyErr_SetString(PyExc_TypeError, "Cannot delete the first attribute"); - return -1; - } - - if (! PyUnicode_Check(value)) { - PyErr_SetString(PyExc_TypeError, - "The first attribute value must be a string"); - return -1; - } - - Py_DECREF(self->first); - Py_INCREF(value); - self->first = value; + if (value == NULL) { + PyErr_SetString(PyExc_TypeError, "Cannot delete the first attribute"); + return -1; + } - return 0; + if (! PyUnicode_Check(value)) { + PyErr_SetString(PyExc_TypeError, + "The first attribute value must be a string"); + return -1; + } + + Py_DECREF(self->first); + Py_INCREF(value); + self->first = value; + + return 0; } static PyObject * @@ -116,30 +114,30 @@ static int Noddy_setlast(Noddy *self, PyObject *value, void *closure) { - if (value == NULL) { - PyErr_SetString(PyExc_TypeError, "Cannot delete the last attribute"); - return -1; - } - - if (! PyUnicode_Check(value)) { - PyErr_SetString(PyExc_TypeError, - "The last attribute value must be a string"); - return -1; - } - - Py_DECREF(self->last); - Py_INCREF(value); - self->last = value; + if (value == NULL) { + PyErr_SetString(PyExc_TypeError, "Cannot delete the last attribute"); + return -1; + } - return 0; + if (! 
PyUnicode_Check(value)) { + PyErr_SetString(PyExc_TypeError, + "The last attribute value must be a string"); + return -1; + } + + Py_DECREF(self->last); + Py_INCREF(value); + self->last = value; + + return 0; } static PyGetSetDef Noddy_getseters[] = { - {"first", + {"first", (getter)Noddy_getfirst, (setter)Noddy_setfirst, "first name", NULL}, - {"last", + {"last", (getter)Noddy_getlast, (setter)Noddy_setlast, "last name", NULL}, @@ -164,7 +162,7 @@ result = PyUnicode_Format(format, args); Py_DECREF(args); - + return result; } @@ -198,12 +196,12 @@ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ "Noddy objects", /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ Noddy_methods, /* tp_methods */ Noddy_members, /* tp_members */ Noddy_getseters, /* tp_getset */ @@ -226,7 +224,7 @@ }; PyMODINIT_FUNC -PyInit_noddy3(void) +PyInit_noddy3(void) { PyObject* m; diff --git a/Doc/includes/noddy4.c b/Doc/includes/noddy4.c --- a/Doc/includes/noddy4.c +++ b/Doc/includes/noddy4.c @@ -27,7 +27,7 @@ return 0; } -static int +static int Noddy_clear(Noddy *self) { PyObject *tmp; @@ -58,18 +58,16 @@ self = (Noddy *)type->tp_alloc(type, 0); if (self != NULL) { self->first = PyUnicode_FromString(""); - if (self->first == NULL) - { + if (self->first == NULL) { Py_DECREF(self); return NULL; - } - + } + self->last = PyUnicode_FromString(""); - if (self->last == NULL) - { + if (self->last == NULL) { Py_DECREF(self); return NULL; - } + } self->number = 0; } @@ -84,10 +82,10 @@ static char *kwlist[] = {"first", "last", "number", NULL}; - if (! PyArg_ParseTupleAndKeywords(args, kwds, "|OOi", kwlist, - &first, &last, + if (! 
PyArg_ParseTupleAndKeywords(args, kwds, "|OOi", kwlist, + &first, &last, &self->number)) - return -1; + return -1; if (first) { tmp = self->first; @@ -145,7 +143,7 @@ result = PyUnicode_Format(format, args); Py_DECREF(args); - + return result; } @@ -182,10 +180,10 @@ "Noddy objects", /* tp_doc */ (traverseproc)Noddy_traverse, /* tp_traverse */ (inquiry)Noddy_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ Noddy_methods, /* tp_methods */ Noddy_members, /* tp_members */ 0, /* tp_getset */ @@ -208,7 +206,7 @@ }; PyMODINIT_FUNC -PyInit_noddy4(void) +PyInit_noddy4(void) { PyObject* m; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 17:16:23 2011 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 12 Aug 2011 17:16:23 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Propose_a_str_opcode_with_a_1-?= =?utf8?q?byte_length?= Message-ID: http://hg.python.org/peps/rev/d25a7c2a51a6 changeset: 3924:d25a7c2a51a6 user: Antoine Pitrou date: Fri Aug 12 17:14:23 2011 +0200 summary: Propose a str opcode with a 1-byte length files: pep-3154.txt | 7 +++++++ 1 files changed, 7 insertions(+), 0 deletions(-) diff --git a/pep-3154.txt b/pep-3154.txt --- a/pep-3154.txt +++ b/pep-3154.txt @@ -72,6 +72,13 @@ It seems that all other opcodes emitted when using protocol 3 already use binary encoding. +Better string encoding +---------------------- + +Short str objects currently have their length coded as a 4-bytes integer, +which is wasteful. A specific opcode with a 1-byte length would make +many pickles smaller. 
+ Acknowledgments -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Fri Aug 12 18:03:54 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 18:03:54 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Test_pipes=2Equ?= =?utf8?q?ote_with_a_few_non-ASCII_characters_=28see_=239723=29=2E?= Message-ID: http://hg.python.org/cpython/rev/8032ea4c3619 changeset: 71823:8032ea4c3619 branch: 3.2 parent: 71786:7ea5d9c858f1 user: ?ric Araujo date: Tue Aug 09 23:03:43 2011 +0200 summary: Test pipes.quote with a few non-ASCII characters (see #9723). That pipes.quote thinks all non-ASCII characters need to be quoted may be a bug, but right now I?m committing this test to make sure I haven?t introduced a behavior change in 3.3 when I simplified the code to use a regex (in 5966eeb0457d). files: Lib/test/test_pipes.py | 3 ++- 1 files changed, 2 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_pipes.py b/Lib/test/test_pipes.py --- a/Lib/test/test_pipes.py +++ b/Lib/test/test_pipes.py @@ -81,7 +81,8 @@ def testQuoting(self): safeunquoted = string.ascii_letters + string.digits + '@%_-+=:,./' - unsafe = '"`$\\!' + unicode_sample = '\xe9\xe0\xdf' # e + acute accent, a + grave, sharp s + unsafe = '"`$\\!' 
+ unicode_sample self.assertEqual(pipes.quote(''), "''") self.assertEqual(pipes.quote(safeunquoted), safeunquoted) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 18:03:54 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 18:03:54 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Avoid_unwanted_behavior_change_in_shlex=2Equote_=28see_=2397?= =?utf8?q?23=29=2E?= Message-ID: http://hg.python.org/cpython/rev/6ae0345a7e29 changeset: 71824:6ae0345a7e29 parent: 71787:69b354a8c90f parent: 71823:8032ea4c3619 user: ?ric Araujo date: Tue Aug 09 23:18:06 2011 +0200 summary: Avoid unwanted behavior change in shlex.quote (see #9723). I simplified the quote code to use a regex instead of a loop+test when I moved pipes.quote to shlex in 5966eeb0457d; Ezio Melotti pointed out that my regex contained redundant parts (now removed) and allowed non-ASCII characters (now disallowed). I think common UNIX shells don?t quote non-ASCII characters, but there?s no harm in doing so. We?ll see if users request a change. files: Lib/shlex.py | 2 +- Lib/test/test_shlex.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Lib/shlex.py b/Lib/shlex.py --- a/Lib/shlex.py +++ b/Lib/shlex.py @@ -276,7 +276,7 @@ return list(lex) -_find_unsafe = re.compile(r'[^\w\d@%_\-\+=:,\./]').search +_find_unsafe = re.compile(r'[^\w@%\-\+=:,\./]', re.ASCII).search def quote(s): """Return a shell-escaped version of the string *s*.""" diff --git a/Lib/test/test_shlex.py b/Lib/test/test_shlex.py --- a/Lib/test/test_shlex.py +++ b/Lib/test/test_shlex.py @@ -176,7 +176,8 @@ def testQuote(self): safeunquoted = string.ascii_letters + string.digits + '@%_-+=:,./' - unsafe = '"`$\\!' + unicode_sample = '\xe9\xe0\xdf' # e + acute accent, a + grave, sharp s + unsafe = '"`$\\!' 
+ unicode_sample self.assertEqual(shlex.quote(''), "''") self.assertEqual(shlex.quote(safeunquoted), safeunquoted) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 18:03:55 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 18:03:55 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Revert_cosmetic_change=2E?= Message-ID: http://hg.python.org/cpython/rev/360f6f4b9ecb changeset: 71825:360f6f4b9ecb user: ?ric Araujo date: Wed Aug 10 02:46:33 2011 +0200 summary: Revert cosmetic change. A reminder: distutils only gets bug fixes. Cosmetic changes, especially in tests, are not worth the time spent, and can even make future merges of bugfixes a bit less easy. files: Lib/distutils/tests/test_build_py.py | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Lib/distutils/tests/test_build_py.py b/Lib/distutils/tests/test_build_py.py --- a/Lib/distutils/tests/test_build_py.py +++ b/Lib/distutils/tests/test_build_py.py @@ -10,7 +10,7 @@ from distutils.errors import DistutilsFileError from distutils.tests import support -from test.support import run_unittest, create_empty_file +from test.support import run_unittest class BuildPyTestCase(support.TempdirManager, @@ -71,11 +71,11 @@ # create the distribution files. 
sources = self.mkdtemp() - create_empty_file(os.path.join(sources, "__init__.py")) + open(os.path.join(sources, "__init__.py"), "w").close() testdir = os.path.join(sources, "doc") os.mkdir(testdir) - create_empty_file(os.path.join(testdir, "testfile")) + open(os.path.join(testdir, "testfile"), "w").close() os.chdir(sources) old_stdout = sys.stdout -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 18:03:56 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 18:03:56 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Fix_find_comman?= =?utf8?q?d_in_makefile_=E2=80=9Cfunny=E2=80=9D_target?= Message-ID: http://hg.python.org/cpython/rev/1b818f3639ef changeset: 71826:1b818f3639ef branch: 3.2 parent: 71823:8032ea4c3619 user: ?ric Araujo date: Wed Aug 10 02:01:32 2011 +0200 summary: Fix find command in makefile ?funny? target files: Makefile.pre.in | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile.pre.in b/Makefile.pre.in --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1283,7 +1283,7 @@ # Find files with funny names funny: - find $(DISTDIRS) \ + find $(SUBDIRS) $(SUBDIRSTOO) \ -name .svn -prune \ -o -type d \ -o -name '*.[chs]' \ @@ -1313,7 +1313,7 @@ -o -name .hgignore \ -o -name .bzrignore \ -o -name MANIFEST \ - -o -print + -print # Perform some verification checks on any modified files. patchcheck: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 18:03:57 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 18:03:57 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Use_real_word_i?= =?utf8?q?n_English_text_=28i=2Ee=2E_not_code=29?= Message-ID: http://hg.python.org/cpython/rev/d79b2ce01438 changeset: 71827:d79b2ce01438 branch: 3.2 user: ?ric Araujo date: Wed Aug 10 04:19:03 2011 +0200 summary: Use real word in English text (i.e. 
not code) files: Doc/library/argparse.rst | 38 ++++++++++++++-------------- 1 files changed, 19 insertions(+), 19 deletions(-) diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst --- a/Doc/library/argparse.rst +++ b/Doc/library/argparse.rst @@ -2,7 +2,7 @@ =============================================================================== .. module:: argparse - :synopsis: Command-line option and argument-parsing library. + :synopsis: Command-line option and argument parsing library. .. moduleauthor:: Steven Bethard .. sectionauthor:: Steven Bethard @@ -107,7 +107,7 @@ Parsing arguments ^^^^^^^^^^^^^^^^^ -:class:`ArgumentParser` parses args through the +:class:`ArgumentParser` parses arguments through the :meth:`~ArgumentParser.parse_args` method. This will inspect the command line, convert each arg to the appropriate type and then invoke the appropriate action. In most cases, this means a simple :class:`Namespace` object will be built up from @@ -118,7 +118,7 @@ In a script, :meth:`~ArgumentParser.parse_args` will typically be called with no arguments, and the :class:`ArgumentParser` will automatically determine the -command-line args from :data:`sys.argv`. +command-line arguments from :data:`sys.argv`. ArgumentParser objects @@ -650,11 +650,11 @@ action ^^^^^^ -:class:`ArgumentParser` objects associate command-line args with actions. These -actions can do just about anything with the command-line args associated with +:class:`ArgumentParser` objects associate command-line arguments with actions. These +actions can do just about anything with the command-line arguments associated with them, though most actions simply add an attribute to the object returned by :meth:`~ArgumentParser.parse_args`. The ``action`` keyword argument specifies -how the command-line args should be handled. The supported actions are: +how the command-line arguments should be handled. The supported actions are: * ``'store'`` - This just stores the argument's value. 
This is the default action. For example:: @@ -726,8 +726,8 @@ :meth:`~ArgumentParser.parse_args`. Most actions add an attribute to this object. -* ``values`` - The associated command-line args, with any type-conversions - applied. (Type-conversions are specified with the type_ keyword argument to +* ``values`` - The associated command-line arguments, with any type conversions + applied. (Type conversions are specified with the type_ keyword argument to :meth:`~ArgumentParser.add_argument`. * ``option_string`` - The option string that was used to invoke this action. @@ -759,7 +759,7 @@ different number of command-line arguments with a single action. The supported values are: -* N (an integer). N args from the command line will be gathered together into a +* N (an integer). N arguments from the command line will be gathered together into a list. For example:: >>> parser = argparse.ArgumentParser() @@ -803,7 +803,7 @@ Namespace(infile=<_io.TextIOWrapper name='' encoding='UTF-8'>, outfile=<_io.TextIOWrapper name='' encoding='UTF-8'>) -* ``'*'``. All command-line args present are gathered into a list. Note that +* ``'*'``. All command-line arguments present are gathered into a list. Note that it generally doesn't make much sense to have more than one positional argument with ``nargs='*'``, but multiple optional arguments with ``nargs='*'`` is possible. For example:: @@ -827,7 +827,7 @@ usage: PROG [-h] foo [foo ...] PROG: error: too few arguments -If the ``nargs`` keyword argument is not provided, the number of args consumed +If the ``nargs`` keyword argument is not provided, the number of arguments consumed is determined by the action_. Generally this means a single command-line arg will be consumed and a single item (not a list) will be produced. @@ -845,7 +845,7 @@ * When :meth:`~ArgumentParser.add_argument` is called with option strings (like ``-f`` or ``--foo``) and ``nargs='?'``. 
This creates an optional - argument that can be followed by zero or one command-line args. + argument that can be followed by zero or one command-line arguments. When parsing the command line, if the option string is encountered with no command-line arg following it, the value of ``const`` will be assumed instead. See the nargs_ description for examples. @@ -895,11 +895,11 @@ type ^^^^ -By default, :class:`ArgumentParser` objects read command-line args in as simple +By default, :class:`ArgumentParser` objects read command-line arguments in as simple strings. However, quite often the command-line string should instead be interpreted as another type, like a :class:`float` or :class:`int`. The ``type`` keyword argument of :meth:`~ArgumentParser.add_argument` allows any -necessary type-checking and type-conversions to be performed. Common built-in +necessary type-checking and type conversions to be performed. Common built-in types and functions can be used directly as the value of the ``type`` argument:: >>> parser = argparse.ArgumentParser() @@ -919,7 +919,7 @@ Namespace(bar=<_io.TextIOWrapper name='out.txt' encoding='UTF-8'>) ``type=`` can take any callable that takes a single string argument and returns -the type-converted value:: +the converted value:: >>> def perfect_square(string): ... value = int(string) @@ -954,7 +954,7 @@ choices ^^^^^^^ -Some command-line args should be selected from a restricted set of values. +Some command-line arguments should be selected from a restricted set of values. These can be handled by passing a container object as the ``choices`` keyword argument to :meth:`~ArgumentParser.add_argument`. 
When the command line is parsed, arg values will be checked, and an error message will be displayed if @@ -1312,7 +1312,7 @@ Beyond ``sys.argv`` ^^^^^^^^^^^^^^^^^^^ -Sometimes it may be useful to have an ArgumentParser parse args other than those +Sometimes it may be useful to have an ArgumentParser parse arguments other than those of :data:`sys.argv`. This can be accomplished by passing a list of strings to :meth:`~ArgumentParser.parse_args`. This is useful for testing at the interactive prompt:: @@ -1540,7 +1540,7 @@ The :class:`FileType` factory creates objects that can be passed to the type argument of :meth:`ArgumentParser.add_argument`. Arguments that have - :class:`FileType` objects as their type will open command-line args as files + :class:`FileType` objects as their type will open command-line arguments as files with the requested modes and buffer sizes: >>> parser = argparse.ArgumentParser() @@ -1654,7 +1654,7 @@ .. method:: ArgumentParser.set_defaults(**kwargs) Most of the time, the attributes of the object returned by :meth:`parse_args` - will be fully determined by inspecting the command-line args and the argument + will be fully determined by inspecting the command-line arguments and the argument actions. 
:meth:`set_defaults` allows some additional attributes that are determined without any inspection of the command line to be added:: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 18:03:58 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 18:03:58 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Remove_unused_names_in_exce?= =?utf8?q?pt_clauses?= Message-ID: http://hg.python.org/cpython/rev/552efc3eb45c changeset: 71828:552efc3eb45c parent: 71825:360f6f4b9ecb user: ?ric Araujo date: Wed Aug 10 20:54:33 2011 +0200 summary: Remove unused names in except clauses files: Lib/shutil.py | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Lib/shutil.py b/Lib/shutil.py --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -267,7 +267,7 @@ names = [] try: names = os.listdir(path) - except os.error as err: + except os.error: onerror(os.listdir, path, sys.exc_info()) for name in names: fullname = os.path.join(path, name) @@ -280,7 +280,7 @@ else: try: os.remove(fullname) - except os.error as err: + except os.error: onerror(os.remove, fullname, sys.exc_info()) try: os.rmdir(path) @@ -323,7 +323,7 @@ raise Error("Destination path '%s' already exists" % real_dst) try: os.rename(src, real_dst) - except OSError as exc: + except OSError: if os.path.isdir(src): if _destinsrc(src, dst): raise Error("Cannot move a directory '%s' into itself '%s'." 
% (src, dst)) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 18:03:58 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 18:03:58 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/b4411e23cca0 changeset: 71829:b4411e23cca0 parent: 71828:552efc3eb45c parent: 71827:d79b2ce01438 user: ?ric Araujo date: Wed Aug 10 21:42:23 2011 +0200 summary: Merge 3.2 files: Doc/library/argparse.rst | 38 ++++++++++++++-------------- Makefile.pre.in | 6 ++-- 2 files changed, 22 insertions(+), 22 deletions(-) diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst --- a/Doc/library/argparse.rst +++ b/Doc/library/argparse.rst @@ -2,7 +2,7 @@ =============================================================================== .. module:: argparse - :synopsis: Command-line option and argument-parsing library. + :synopsis: Command-line option and argument parsing library. .. moduleauthor:: Steven Bethard .. sectionauthor:: Steven Bethard @@ -107,7 +107,7 @@ Parsing arguments ^^^^^^^^^^^^^^^^^ -:class:`ArgumentParser` parses args through the +:class:`ArgumentParser` parses arguments through the :meth:`~ArgumentParser.parse_args` method. This will inspect the command line, convert each arg to the appropriate type and then invoke the appropriate action. In most cases, this means a simple :class:`Namespace` object will be built up from @@ -118,7 +118,7 @@ In a script, :meth:`~ArgumentParser.parse_args` will typically be called with no arguments, and the :class:`ArgumentParser` will automatically determine the -command-line args from :data:`sys.argv`. +command-line arguments from :data:`sys.argv`. ArgumentParser objects @@ -669,11 +669,11 @@ action ^^^^^^ -:class:`ArgumentParser` objects associate command-line args with actions. 
These -actions can do just about anything with the command-line args associated with +:class:`ArgumentParser` objects associate command-line arguments with actions. These +actions can do just about anything with the command-line arguments associated with them, though most actions simply add an attribute to the object returned by :meth:`~ArgumentParser.parse_args`. The ``action`` keyword argument specifies -how the command-line args should be handled. The supported actions are: +how the command-line arguments should be handled. The supported actions are: * ``'store'`` - This just stores the argument's value. This is the default action. For example:: @@ -745,8 +745,8 @@ :meth:`~ArgumentParser.parse_args`. Most actions add an attribute to this object. -* ``values`` - The associated command-line args, with any type-conversions - applied. (Type-conversions are specified with the type_ keyword argument to +* ``values`` - The associated command-line arguments, with any type conversions + applied. (Type conversions are specified with the type_ keyword argument to :meth:`~ArgumentParser.add_argument`. * ``option_string`` - The option string that was used to invoke this action. @@ -778,7 +778,7 @@ different number of command-line arguments with a single action. The supported values are: -* N (an integer). N args from the command line will be gathered together into a +* N (an integer). N arguments from the command line will be gathered together into a list. For example:: >>> parser = argparse.ArgumentParser() @@ -822,7 +822,7 @@ Namespace(infile=<_io.TextIOWrapper name='' encoding='UTF-8'>, outfile=<_io.TextIOWrapper name='' encoding='UTF-8'>) -* ``'*'``. All command-line args present are gathered into a list. Note that +* ``'*'``. All command-line arguments present are gathered into a list. Note that it generally doesn't make much sense to have more than one positional argument with ``nargs='*'``, but multiple optional arguments with ``nargs='*'`` is possible. 
For example:: @@ -846,7 +846,7 @@ usage: PROG [-h] foo [foo ...] PROG: error: too few arguments -If the ``nargs`` keyword argument is not provided, the number of args consumed +If the ``nargs`` keyword argument is not provided, the number of arguments consumed is determined by the action_. Generally this means a single command-line arg will be consumed and a single item (not a list) will be produced. @@ -864,7 +864,7 @@ * When :meth:`~ArgumentParser.add_argument` is called with option strings (like ``-f`` or ``--foo``) and ``nargs='?'``. This creates an optional - argument that can be followed by zero or one command-line args. + argument that can be followed by zero or one command-line arguments. When parsing the command line, if the option string is encountered with no command-line arg following it, the value of ``const`` will be assumed instead. See the nargs_ description for examples. @@ -914,11 +914,11 @@ type ^^^^ -By default, :class:`ArgumentParser` objects read command-line args in as simple +By default, :class:`ArgumentParser` objects read command-line arguments in as simple strings. However, quite often the command-line string should instead be interpreted as another type, like a :class:`float` or :class:`int`. The ``type`` keyword argument of :meth:`~ArgumentParser.add_argument` allows any -necessary type-checking and type-conversions to be performed. Common built-in +necessary type-checking and type conversions to be performed. Common built-in types and functions can be used directly as the value of the ``type`` argument:: >>> parser = argparse.ArgumentParser() @@ -938,7 +938,7 @@ Namespace(bar=<_io.TextIOWrapper name='out.txt' encoding='UTF-8'>) ``type=`` can take any callable that takes a single string argument and returns -the type-converted value:: +the converted value:: >>> def perfect_square(string): ... value = int(string) @@ -973,7 +973,7 @@ choices ^^^^^^^ -Some command-line args should be selected from a restricted set of values. 
+Some command-line arguments should be selected from a restricted set of values. These can be handled by passing a container object as the ``choices`` keyword argument to :meth:`~ArgumentParser.add_argument`. When the command line is parsed, arg values will be checked, and an error message will be displayed if @@ -1331,7 +1331,7 @@ Beyond ``sys.argv`` ^^^^^^^^^^^^^^^^^^^ -Sometimes it may be useful to have an ArgumentParser parse args other than those +Sometimes it may be useful to have an ArgumentParser parse arguments other than those of :data:`sys.argv`. This can be accomplished by passing a list of strings to :meth:`~ArgumentParser.parse_args`. This is useful for testing at the interactive prompt:: @@ -1559,7 +1559,7 @@ The :class:`FileType` factory creates objects that can be passed to the type argument of :meth:`ArgumentParser.add_argument`. Arguments that have - :class:`FileType` objects as their type will open command-line args as files + :class:`FileType` objects as their type will open command-line arguments as files with the requested modes and buffer sizes: >>> parser = argparse.ArgumentParser() @@ -1673,7 +1673,7 @@ .. method:: ArgumentParser.set_defaults(**kwargs) Most of the time, the attributes of the object returned by :meth:`parse_args` - will be fully determined by inspecting the command-line args and the argument + will be fully determined by inspecting the command-line arguments and the argument actions. 
:meth:`set_defaults` allows some additional attributes that are determined without any inspection of the command line to be added:: diff --git a/Makefile.pre.in b/Makefile.pre.in --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1331,8 +1331,8 @@ # Find files with funny names funny: - find $(DISTDIRS) \ - -o -type d \ + find $(SUBDIRS) $(SUBDIRSTOO) \ + -type d \ -o -name '*.[chs]' \ -o -name '*.py' \ -o -name '*.pyw' \ @@ -1360,7 +1360,7 @@ -o -name .hgignore \ -o -name .bzrignore \ -o -name MANIFEST \ - -o -print + -print # Perform some verification checks on any modified files. patchcheck: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 18:04:00 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 18:04:00 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_directive_markup?= Message-ID: http://hg.python.org/cpython/rev/4f860536efa3 changeset: 71830:4f860536efa3 user: ?ric Araujo date: Wed Aug 10 21:43:13 2011 +0200 summary: Fix directive markup files: Doc/library/email.policy.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/email.policy.rst b/Doc/library/email.policy.rst --- a/Doc/library/email.policy.rst +++ b/Doc/library/email.policy.rst @@ -4,7 +4,7 @@ .. module:: email.policy :synopsis: Controlling the parsing and generating of messages -.. versionadded: 3.3 +.. 
versionadded:: 3.3 The :mod:`email` package's prime focus is the handling of email messages as -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 18:04:00 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 18:04:00 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Use_our_existing_drop-in=2C?= =?utf8?q?_no-op_decorator_instead_of_redefining_it=2E?= Message-ID: http://hg.python.org/cpython/rev/e0ac2f603a71 changeset: 71831:e0ac2f603a71 user: ?ric Araujo date: Fri Aug 12 00:15:41 2011 +0200 summary: Use our existing drop-in, no-op decorator instead of redefining it. Patch by Francisco Martin Brugue. files: Lib/packaging/tests/test_pypi_xmlrpc.py | 8 ++------ 1 files changed, 2 insertions(+), 6 deletions(-) diff --git a/Lib/packaging/tests/test_pypi_xmlrpc.py b/Lib/packaging/tests/test_pypi_xmlrpc.py --- a/Lib/packaging/tests/test_pypi_xmlrpc.py +++ b/Lib/packaging/tests/test_pypi_xmlrpc.py @@ -3,18 +3,14 @@ from packaging.pypi.xmlrpc import Client, InvalidSearchField, ProjectNotFound from packaging.tests import unittest +from packaging.tests.support import fake_dec try: import threading from packaging.tests.pypi_server import use_xmlrpc_server except ImportError: threading = None - def use_xmlrpc_server(): - def _use(func): - def __use(*args, **kw): - return func(*args, **kw) - return __use - return _use + use_xmlrpc_server = fake_dec @unittest.skipIf(threading is None, "Needs threading") -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 18:04:01 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 18:04:01 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_Brench_merge?= Message-ID: http://hg.python.org/cpython/rev/5eac01a77038 changeset: 71832:5eac01a77038 parent: 71822:683202530137 parent: 71831:e0ac2f603a71 user: ?ric Araujo date: Fri Aug 12 17:59:25 2011 +0200 summary: Brench 
merge files: Doc/library/argparse.rst | 38 ++++++------ Doc/library/email.policy.rst | 2 +- Lib/distutils/tests/test_build_py.py | 6 +- Lib/packaging/tests/test_pypi_xmlrpc.py | 8 +-- Lib/shlex.py | 2 +- Lib/shutil.py | 6 +- Lib/test/test_shlex.py | 3 +- Makefile.pre.in | 6 +- 8 files changed, 34 insertions(+), 37 deletions(-) diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst --- a/Doc/library/argparse.rst +++ b/Doc/library/argparse.rst @@ -2,7 +2,7 @@ =============================================================================== .. module:: argparse - :synopsis: Command-line option and argument-parsing library. + :synopsis: Command-line option and argument parsing library. .. moduleauthor:: Steven Bethard .. sectionauthor:: Steven Bethard @@ -107,7 +107,7 @@ Parsing arguments ^^^^^^^^^^^^^^^^^ -:class:`ArgumentParser` parses args through the +:class:`ArgumentParser` parses arguments through the :meth:`~ArgumentParser.parse_args` method. This will inspect the command line, convert each arg to the appropriate type and then invoke the appropriate action. In most cases, this means a simple :class:`Namespace` object will be built up from @@ -118,7 +118,7 @@ In a script, :meth:`~ArgumentParser.parse_args` will typically be called with no arguments, and the :class:`ArgumentParser` will automatically determine the -command-line args from :data:`sys.argv`. +command-line arguments from :data:`sys.argv`. ArgumentParser objects @@ -669,11 +669,11 @@ action ^^^^^^ -:class:`ArgumentParser` objects associate command-line args with actions. These -actions can do just about anything with the command-line args associated with +:class:`ArgumentParser` objects associate command-line arguments with actions. These +actions can do just about anything with the command-line arguments associated with them, though most actions simply add an attribute to the object returned by :meth:`~ArgumentParser.parse_args`. 
The ``action`` keyword argument specifies -how the command-line args should be handled. The supported actions are: +how the command-line arguments should be handled. The supported actions are: * ``'store'`` - This just stores the argument's value. This is the default action. For example:: @@ -745,8 +745,8 @@ :meth:`~ArgumentParser.parse_args`. Most actions add an attribute to this object. -* ``values`` - The associated command-line args, with any type-conversions - applied. (Type-conversions are specified with the type_ keyword argument to +* ``values`` - The associated command-line arguments, with any type conversions + applied. (Type conversions are specified with the type_ keyword argument to :meth:`~ArgumentParser.add_argument`. * ``option_string`` - The option string that was used to invoke this action. @@ -778,7 +778,7 @@ different number of command-line arguments with a single action. The supported values are: -* N (an integer). N args from the command line will be gathered together into a +* N (an integer). N arguments from the command line will be gathered together into a list. For example:: >>> parser = argparse.ArgumentParser() @@ -822,7 +822,7 @@ Namespace(infile=<_io.TextIOWrapper name='' encoding='UTF-8'>, outfile=<_io.TextIOWrapper name='' encoding='UTF-8'>) -* ``'*'``. All command-line args present are gathered into a list. Note that +* ``'*'``. All command-line arguments present are gathered into a list. Note that it generally doesn't make much sense to have more than one positional argument with ``nargs='*'``, but multiple optional arguments with ``nargs='*'`` is possible. For example:: @@ -846,7 +846,7 @@ usage: PROG [-h] foo [foo ...] PROG: error: too few arguments -If the ``nargs`` keyword argument is not provided, the number of args consumed +If the ``nargs`` keyword argument is not provided, the number of arguments consumed is determined by the action_. 
Generally this means a single command-line arg will be consumed and a single item (not a list) will be produced. @@ -864,7 +864,7 @@ * When :meth:`~ArgumentParser.add_argument` is called with option strings (like ``-f`` or ``--foo``) and ``nargs='?'``. This creates an optional - argument that can be followed by zero or one command-line args. + argument that can be followed by zero or one command-line arguments. When parsing the command line, if the option string is encountered with no command-line arg following it, the value of ``const`` will be assumed instead. See the nargs_ description for examples. @@ -914,11 +914,11 @@ type ^^^^ -By default, :class:`ArgumentParser` objects read command-line args in as simple +By default, :class:`ArgumentParser` objects read command-line arguments in as simple strings. However, quite often the command-line string should instead be interpreted as another type, like a :class:`float` or :class:`int`. The ``type`` keyword argument of :meth:`~ArgumentParser.add_argument` allows any -necessary type-checking and type-conversions to be performed. Common built-in +necessary type-checking and type conversions to be performed. Common built-in types and functions can be used directly as the value of the ``type`` argument:: >>> parser = argparse.ArgumentParser() @@ -938,7 +938,7 @@ Namespace(bar=<_io.TextIOWrapper name='out.txt' encoding='UTF-8'>) ``type=`` can take any callable that takes a single string argument and returns -the type-converted value:: +the converted value:: >>> def perfect_square(string): ... value = int(string) @@ -973,7 +973,7 @@ choices ^^^^^^^ -Some command-line args should be selected from a restricted set of values. +Some command-line arguments should be selected from a restricted set of values. These can be handled by passing a container object as the ``choices`` keyword argument to :meth:`~ArgumentParser.add_argument`. 
When the command line is parsed, arg values will be checked, and an error message will be displayed if @@ -1331,7 +1331,7 @@ Beyond ``sys.argv`` ^^^^^^^^^^^^^^^^^^^ -Sometimes it may be useful to have an ArgumentParser parse args other than those +Sometimes it may be useful to have an ArgumentParser parse arguments other than those of :data:`sys.argv`. This can be accomplished by passing a list of strings to :meth:`~ArgumentParser.parse_args`. This is useful for testing at the interactive prompt:: @@ -1559,7 +1559,7 @@ The :class:`FileType` factory creates objects that can be passed to the type argument of :meth:`ArgumentParser.add_argument`. Arguments that have - :class:`FileType` objects as their type will open command-line args as files + :class:`FileType` objects as their type will open command-line arguments as files with the requested modes and buffer sizes: >>> parser = argparse.ArgumentParser() @@ -1673,7 +1673,7 @@ .. method:: ArgumentParser.set_defaults(**kwargs) Most of the time, the attributes of the object returned by :meth:`parse_args` - will be fully determined by inspecting the command-line args and the argument + will be fully determined by inspecting the command-line arguments and the argument actions. :meth:`set_defaults` allows some additional attributes that are determined without any inspection of the command line to be added:: diff --git a/Doc/library/email.policy.rst b/Doc/library/email.policy.rst --- a/Doc/library/email.policy.rst +++ b/Doc/library/email.policy.rst @@ -4,7 +4,7 @@ .. module:: email.policy :synopsis: Controlling the parsing and generating of messages -.. versionadded: 3.3 +.. 
versionadded:: 3.3 The :mod:`email` package's prime focus is the handling of email messages as diff --git a/Lib/distutils/tests/test_build_py.py b/Lib/distutils/tests/test_build_py.py --- a/Lib/distutils/tests/test_build_py.py +++ b/Lib/distutils/tests/test_build_py.py @@ -10,7 +10,7 @@ from distutils.errors import DistutilsFileError from distutils.tests import support -from test.support import run_unittest, create_empty_file +from test.support import run_unittest class BuildPyTestCase(support.TempdirManager, @@ -71,11 +71,11 @@ # create the distribution files. sources = self.mkdtemp() - create_empty_file(os.path.join(sources, "__init__.py")) + open(os.path.join(sources, "__init__.py"), "w").close() testdir = os.path.join(sources, "doc") os.mkdir(testdir) - create_empty_file(os.path.join(testdir, "testfile")) + open(os.path.join(testdir, "testfile"), "w").close() os.chdir(sources) old_stdout = sys.stdout diff --git a/Lib/packaging/tests/test_pypi_xmlrpc.py b/Lib/packaging/tests/test_pypi_xmlrpc.py --- a/Lib/packaging/tests/test_pypi_xmlrpc.py +++ b/Lib/packaging/tests/test_pypi_xmlrpc.py @@ -3,18 +3,14 @@ from packaging.pypi.xmlrpc import Client, InvalidSearchField, ProjectNotFound from packaging.tests import unittest +from packaging.tests.support import fake_dec try: import threading from packaging.tests.pypi_server import use_xmlrpc_server except ImportError: threading = None - def use_xmlrpc_server(): - def _use(func): - def __use(*args, **kw): - return func(*args, **kw) - return __use - return _use + use_xmlrpc_server = fake_dec @unittest.skipIf(threading is None, "Needs threading") diff --git a/Lib/shlex.py b/Lib/shlex.py --- a/Lib/shlex.py +++ b/Lib/shlex.py @@ -276,7 +276,7 @@ return list(lex) -_find_unsafe = re.compile(r'[^\w\d@%_\-\+=:,\./]').search +_find_unsafe = re.compile(r'[^\w@%\-\+=:,\./]', re.ASCII).search def quote(s): """Return a shell-escaped version of the string *s*.""" diff --git a/Lib/shutil.py b/Lib/shutil.py --- a/Lib/shutil.py +++ 
b/Lib/shutil.py @@ -267,7 +267,7 @@ names = [] try: names = os.listdir(path) - except os.error as err: + except os.error: onerror(os.listdir, path, sys.exc_info()) for name in names: fullname = os.path.join(path, name) @@ -280,7 +280,7 @@ else: try: os.remove(fullname) - except os.error as err: + except os.error: onerror(os.remove, fullname, sys.exc_info()) try: os.rmdir(path) @@ -323,7 +323,7 @@ raise Error("Destination path '%s' already exists" % real_dst) try: os.rename(src, real_dst) - except OSError as exc: + except OSError: if os.path.isdir(src): if _destinsrc(src, dst): raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst)) diff --git a/Lib/test/test_shlex.py b/Lib/test/test_shlex.py --- a/Lib/test/test_shlex.py +++ b/Lib/test/test_shlex.py @@ -176,7 +176,8 @@ def testQuote(self): safeunquoted = string.ascii_letters + string.digits + '@%_-+=:,./' - unsafe = '"`$\\!' + unicode_sample = '\xe9\xe0\xdf' # e + acute accent, a + grave, sharp s + unsafe = '"`$\\!' + unicode_sample self.assertEqual(shlex.quote(''), "''") self.assertEqual(shlex.quote(safeunquoted), safeunquoted) diff --git a/Makefile.pre.in b/Makefile.pre.in --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1330,8 +1330,8 @@ # Find files with funny names funny: - find $(DISTDIRS) \ - -o -type d \ + find $(SUBDIRS) $(SUBDIRSTOO) \ + -type d \ -o -name '*.[chs]' \ -o -name '*.py' \ -o -name '*.pyw' \ @@ -1359,7 +1359,7 @@ -o -name .hgignore \ -o -name .bzrignore \ -o -name MANIFEST \ - -o -print + -print # Perform some verification checks on any modified files. 
patchcheck: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 18:04:01 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 18:04:01 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAobWVyZ2UgMy4yIC0+IDMuMik6?= =?utf8?q?_Branch_merge?= Message-ID: http://hg.python.org/cpython/rev/dfd39fec4758 changeset: 71833:dfd39fec4758 branch: 3.2 parent: 71817:c47bc1349e61 parent: 71827:d79b2ce01438 user: ?ric Araujo date: Fri Aug 12 17:40:25 2011 +0200 summary: Branch merge files: Doc/library/argparse.rst | 38 ++++++++++++++-------------- Lib/test/test_pipes.py | 3 +- Makefile.pre.in | 4 +- 3 files changed, 23 insertions(+), 22 deletions(-) diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst --- a/Doc/library/argparse.rst +++ b/Doc/library/argparse.rst @@ -2,7 +2,7 @@ =============================================================================== .. module:: argparse - :synopsis: Command-line option and argument-parsing library. + :synopsis: Command-line option and argument parsing library. .. moduleauthor:: Steven Bethard .. sectionauthor:: Steven Bethard @@ -107,7 +107,7 @@ Parsing arguments ^^^^^^^^^^^^^^^^^ -:class:`ArgumentParser` parses args through the +:class:`ArgumentParser` parses arguments through the :meth:`~ArgumentParser.parse_args` method. This will inspect the command line, convert each arg to the appropriate type and then invoke the appropriate action. In most cases, this means a simple :class:`Namespace` object will be built up from @@ -118,7 +118,7 @@ In a script, :meth:`~ArgumentParser.parse_args` will typically be called with no arguments, and the :class:`ArgumentParser` will automatically determine the -command-line args from :data:`sys.argv`. +command-line arguments from :data:`sys.argv`. ArgumentParser objects @@ -650,11 +650,11 @@ action ^^^^^^ -:class:`ArgumentParser` objects associate command-line args with actions. 
These -actions can do just about anything with the command-line args associated with +:class:`ArgumentParser` objects associate command-line arguments with actions. These +actions can do just about anything with the command-line arguments associated with them, though most actions simply add an attribute to the object returned by :meth:`~ArgumentParser.parse_args`. The ``action`` keyword argument specifies -how the command-line args should be handled. The supported actions are: +how the command-line arguments should be handled. The supported actions are: * ``'store'`` - This just stores the argument's value. This is the default action. For example:: @@ -726,8 +726,8 @@ :meth:`~ArgumentParser.parse_args`. Most actions add an attribute to this object. -* ``values`` - The associated command-line args, with any type-conversions - applied. (Type-conversions are specified with the type_ keyword argument to +* ``values`` - The associated command-line arguments, with any type conversions + applied. (Type conversions are specified with the type_ keyword argument to :meth:`~ArgumentParser.add_argument`. * ``option_string`` - The option string that was used to invoke this action. @@ -759,7 +759,7 @@ different number of command-line arguments with a single action. The supported values are: -* N (an integer). N args from the command line will be gathered together into a +* N (an integer). N arguments from the command line will be gathered together into a list. For example:: >>> parser = argparse.ArgumentParser() @@ -803,7 +803,7 @@ Namespace(infile=<_io.TextIOWrapper name='' encoding='UTF-8'>, outfile=<_io.TextIOWrapper name='' encoding='UTF-8'>) -* ``'*'``. All command-line args present are gathered into a list. Note that +* ``'*'``. All command-line arguments present are gathered into a list. Note that it generally doesn't make much sense to have more than one positional argument with ``nargs='*'``, but multiple optional arguments with ``nargs='*'`` is possible. 
For example:: @@ -827,7 +827,7 @@ usage: PROG [-h] foo [foo ...] PROG: error: too few arguments -If the ``nargs`` keyword argument is not provided, the number of args consumed +If the ``nargs`` keyword argument is not provided, the number of arguments consumed is determined by the action_. Generally this means a single command-line arg will be consumed and a single item (not a list) will be produced. @@ -845,7 +845,7 @@ * When :meth:`~ArgumentParser.add_argument` is called with option strings (like ``-f`` or ``--foo``) and ``nargs='?'``. This creates an optional - argument that can be followed by zero or one command-line args. + argument that can be followed by zero or one command-line arguments. When parsing the command line, if the option string is encountered with no command-line arg following it, the value of ``const`` will be assumed instead. See the nargs_ description for examples. @@ -895,11 +895,11 @@ type ^^^^ -By default, :class:`ArgumentParser` objects read command-line args in as simple +By default, :class:`ArgumentParser` objects read command-line arguments in as simple strings. However, quite often the command-line string should instead be interpreted as another type, like a :class:`float` or :class:`int`. The ``type`` keyword argument of :meth:`~ArgumentParser.add_argument` allows any -necessary type-checking and type-conversions to be performed. Common built-in +necessary type-checking and type conversions to be performed. Common built-in types and functions can be used directly as the value of the ``type`` argument:: >>> parser = argparse.ArgumentParser() @@ -919,7 +919,7 @@ Namespace(bar=<_io.TextIOWrapper name='out.txt' encoding='UTF-8'>) ``type=`` can take any callable that takes a single string argument and returns -the type-converted value:: +the converted value:: >>> def perfect_square(string): ... value = int(string) @@ -954,7 +954,7 @@ choices ^^^^^^^ -Some command-line args should be selected from a restricted set of values. 
+Some command-line arguments should be selected from a restricted set of values. These can be handled by passing a container object as the ``choices`` keyword argument to :meth:`~ArgumentParser.add_argument`. When the command line is parsed, arg values will be checked, and an error message will be displayed if @@ -1312,7 +1312,7 @@ Beyond ``sys.argv`` ^^^^^^^^^^^^^^^^^^^ -Sometimes it may be useful to have an ArgumentParser parse args other than those +Sometimes it may be useful to have an ArgumentParser parse arguments other than those of :data:`sys.argv`. This can be accomplished by passing a list of strings to :meth:`~ArgumentParser.parse_args`. This is useful for testing at the interactive prompt:: @@ -1540,7 +1540,7 @@ The :class:`FileType` factory creates objects that can be passed to the type argument of :meth:`ArgumentParser.add_argument`. Arguments that have - :class:`FileType` objects as their type will open command-line args as files + :class:`FileType` objects as their type will open command-line arguments as files with the requested modes and buffer sizes: >>> parser = argparse.ArgumentParser() @@ -1654,7 +1654,7 @@ .. method:: ArgumentParser.set_defaults(**kwargs) Most of the time, the attributes of the object returned by :meth:`parse_args` - will be fully determined by inspecting the command-line args and the argument + will be fully determined by inspecting the command-line arguments and the argument actions. :meth:`set_defaults` allows some additional attributes that are determined without any inspection of the command line to be added:: diff --git a/Lib/test/test_pipes.py b/Lib/test/test_pipes.py --- a/Lib/test/test_pipes.py +++ b/Lib/test/test_pipes.py @@ -81,7 +81,8 @@ def testQuoting(self): safeunquoted = string.ascii_letters + string.digits + '@%_-+=:,./' - unsafe = '"`$\\!' + unicode_sample = '\xe9\xe0\xdf' # e + acute accent, a + grave, sharp s + unsafe = '"`$\\!' 
+ unicode_sample self.assertEqual(pipes.quote(''), "''") self.assertEqual(pipes.quote(safeunquoted), safeunquoted) diff --git a/Makefile.pre.in b/Makefile.pre.in --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1283,7 +1283,7 @@ # Find files with funny names funny: - find $(DISTDIRS) \ + find $(SUBDIRS) $(SUBDIRSTOO) \ -name .svn -prune \ -o -type d \ -o -name '*.[chs]' \ @@ -1313,7 +1313,7 @@ -o -name .hgignore \ -o -name .bzrignore \ -o -name MANIFEST \ - -o -print + -print # Perform some verification checks on any modified files. patchcheck: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 18:04:02 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 18:04:02 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_patchcheck=3A_d?= =?utf8?q?on=E2=80=99t_talk_about_the_test_suite_when_no_code_file_were_ch?= =?utf8?q?anged=2E?= Message-ID: http://hg.python.org/cpython/rev/f1859c9d1086 changeset: 71834:f1859c9d1086 branch: 3.2 user: ?ric Araujo date: Fri Aug 12 17:50:08 2011 +0200 summary: patchcheck: don?t talk about the test suite when no code file were changed. The line about the test suite will still get printed for changes in Tools for example, which aren?t covered by the test suite, but it?s not a big deal IMO. files: Tools/scripts/patchcheck.py | 5 +++-- 1 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Tools/scripts/patchcheck.py b/Tools/scripts/patchcheck.py --- a/Tools/scripts/patchcheck.py +++ b/Tools/scripts/patchcheck.py @@ -157,8 +157,9 @@ reported_news(special_files) # Test suite run and passed. 
- print() - print("Did you run the test suite?") + if python_files or c_files: + print() + print("Did you run the test suite?") if __name__ == '__main__': -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 18:04:03 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 18:04:03 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/d403eaec64df changeset: 71835:d403eaec64df parent: 71832:5eac01a77038 parent: 71834:f1859c9d1086 user: ?ric Araujo date: Fri Aug 12 18:03:30 2011 +0200 summary: Merge 3.2 files: Tools/scripts/patchcheck.py | 5 +++-- 1 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Tools/scripts/patchcheck.py b/Tools/scripts/patchcheck.py --- a/Tools/scripts/patchcheck.py +++ b/Tools/scripts/patchcheck.py @@ -141,8 +141,9 @@ reported_news(special_files) # Test suite run and passed. - print() - print("Did you run the test suite?") + if python_files or c_files: + print() + print("Did you run the test suite?") if __name__ == '__main__': -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 18:51:02 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 18:51:02 +0200 Subject: [Python-checkins] =?utf8?q?devguide=3A_Fix_minor_grammar_issue?= Message-ID: http://hg.python.org/devguide/rev/69e17de6d449 changeset: 439:69e17de6d449 user: ?ric Araujo date: Fri Aug 12 18:50:07 2011 +0200 summary: Fix minor grammar issue files: coverage.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/coverage.rst b/coverage.rst --- a/coverage.rst +++ b/coverage.rst @@ -125,7 +125,7 @@ ./python -m coverage report --show-missing -But one of the strengths of coverage.py is its HTML-based reports which lets +But one of the strengths of coverage.py is its HTML-based reports which let you visually see what lines of code were not 
tested:: ./python -m coverage html -i --omit="*/test/*,*/tests/*" -- Repository URL: http://hg.python.org/devguide From python-checkins at python.org Fri Aug 12 18:51:06 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 18:51:06 +0200 Subject: [Python-checkins] =?utf8?q?devguide=3A_Mention_test=2Eautotest_fo?= =?utf8?q?r_the_benefit_of_users_without_a_command_line?= Message-ID: http://hg.python.org/devguide/rev/c18fd0ee23ed changeset: 440:c18fd0ee23ed user: ?ric Araujo date: Fri Aug 12 18:50:39 2011 +0200 summary: Mention test.autotest for the benefit of users without a command line files: runtests.rst | 5 +++++ 1 files changed, 5 insertions(+), 0 deletions(-) diff --git a/runtests.rst b/runtests.rst --- a/runtests.rst +++ b/runtests.rst @@ -17,6 +17,11 @@ ./python -m test +If you don't have easy access to a command line, you can run the test suite from +a Python or IDLE shell:: + + >>> from test import autotest + This will run the majority of tests, but exclude a small portion of them; these excluded tests use special kinds of resources: for example, accessing the Internet, or trying to play a sound or to display a graphical interface on -- Repository URL: http://hg.python.org/devguide From python-checkins at python.org Fri Aug 12 18:52:35 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 18:52:35 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Fix_copy-paste?= Message-ID: http://hg.python.org/peps/rev/f63ebf90f159 changeset: 3925:f63ebf90f159 user: ?ric Araujo date: Fri Aug 12 18:52:01 2011 +0200 summary: Fix copy-paste files: pep-0280.txt | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/pep-0280.txt b/pep-0280.txt --- a/pep-0280.txt +++ b/pep-0280.txt @@ -13,7 +13,7 @@ Deferral While this PEP is a nice idea, no-one has yet emerged to do the work of - hashing out the differences between this PEP, PEP 267 and PEP 280. + hashing out the differences between this PEP, PEP 266 and PEP 267. 
Hence, it is being deferred. -- Repository URL: http://hg.python.org/peps From tjreedy at udel.edu Fri Aug 12 19:02:02 2011 From: tjreedy at udel.edu (Terry Reedy) Date: Fri, 12 Aug 2011 13:02:02 -0400 Subject: [Python-checkins] cpython (3.2): Use real word in English text (i.e. not code) In-Reply-To: References: Message-ID: <4E455C8A.4030104@udel.edu> On 8/12/2011 12:03 PM, eric.araujo wrote: > http://hg.python.org/cpython/rev/d79b2ce01438 > changeset: 71827:d79b2ce01438 > branch: 3.2 > user: ?ric Araujo > date: Wed Aug 10 04:19:03 2011 +0200 > summary: > Use real word in English text (i.e. not code) I agree that 'arg' for 'argument is email/twitter-speak, not proper document prose. > diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst > - :synopsis: Command-line option and argument-parsing library. > + :synopsis: Command-line option and argument parsing library. However, 'argument-parsing' could/should be left hyphenated as a compound adjective for the same reason 'command-line' is. > -:class:`ArgumentParser` parses args through the > +:class:`ArgumentParser` parses arguments through the > :meth:`~ArgumentParser.parse_args` method. This will inspect the command line, > convert each arg to the appropriate type and then invoke the appropriate action. An arg you missed > -If the ``nargs`` keyword argument is not provided, the number of args consumed > +If the ``nargs`` keyword argument is not provided, the number of arguments consumed > is determined by the action_. Generally this means a single command-line arg Another. Args are as pesky as bugs. > - argument that can be followed by zero or one command-line args. > + argument that can be followed by zero or one command-line arguments. > When parsing the command line, if the option string is encountered with no > command-line arg following it, the value of ``const`` will be assumed instead. arg > -necessary type-checking and type-conversions to be performed. 
Common built-in > +necessary type-checking and type conversions to be performed. Common built-in Right, 'type-conversion' is not an compound adjective here. > -Some command-line args should be selected from a restricted set of values. > +Some command-line arguments should be selected from a restricted set of values. > These can be handled by passing a container object as the ``choices`` keyword > argument to :meth:`~ArgumentParser.add_argument`. When the command line is > parsed, arg values will be checked, and an error message will be displayed if arg It looks you did a global / args/ arguments/ but not / arg / argument /. Terry From python-checkins at python.org Fri Aug 12 19:12:13 2011 From: python-checkins at python.org (sandro.tosi) Date: Fri, 12 Aug 2011 19:12:13 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_English_fixup?= =?utf8?q?=2C_from_Mike_MacCana_on_docs=40?= Message-ID: http://hg.python.org/cpython/rev/ba701fb169d1 changeset: 71836:ba701fb169d1 branch: 2.7 parent: 71813:0fbd44e3f342 user: Sandro Tosi date: Fri Aug 12 19:11:24 2011 +0200 summary: English fixup, from Mike MacCana on docs@ files: Doc/library/urllib.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/urllib.rst b/Doc/library/urllib.rst --- a/Doc/library/urllib.rst +++ b/Doc/library/urllib.rst @@ -23,7 +23,7 @@ instead of filenames. Some restrictions apply --- it can only open URLs for reading, and no seek operations are available. -.. warning:: When opening HTTPS URLs, it is not attempted to validate the +.. warning:: When opening HTTPS URLs, it does not attempt to validate the server certificate. Use at your own risk! 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 19:33:52 2011 From: python-checkins at python.org (sandro.tosi) Date: Fri, 12 Aug 2011 19:33:52 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogaXQncyAncmF0aGVy?= =?utf8?q?_than=27=3B_reported_by_James_Bateman_on_docs=40?= Message-ID: http://hg.python.org/cpython/rev/7d9024b7a001 changeset: 71837:7d9024b7a001 branch: 2.7 user: Sandro Tosi date: Fri Aug 12 19:31:15 2011 +0200 summary: it's 'rather than'; reported by James Bateman on docs@ files: Doc/library/curses.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/curses.rst b/Doc/library/curses.rst --- a/Doc/library/curses.rst +++ b/Doc/library/curses.rst @@ -652,7 +652,7 @@ .. note:: - A *character* means a C character (an ASCII code), rather then a Python + A *character* means a C character (an ASCII code), rather than a Python character (a string of length 1). (This note is true whenever the documentation mentions a character.) The built-in :func:`ord` is handy for conveying strings to codes. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 19:33:52 2011 From: python-checkins at python.org (sandro.tosi) Date: Fri, 12 Aug 2011 19:33:52 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogaXQncyAncmF0aGVy?= =?utf8?q?_than=27=3B_reported_by_James_Bateman_on_docs=40?= Message-ID: http://hg.python.org/cpython/rev/aa6c073c2597 changeset: 71838:aa6c073c2597 branch: 3.2 parent: 71834:f1859c9d1086 user: Sandro Tosi date: Fri Aug 12 19:31:32 2011 +0200 summary: it's 'rather than'; reported by James Bateman on docs@ files: Doc/library/curses.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/curses.rst b/Doc/library/curses.rst --- a/Doc/library/curses.rst +++ b/Doc/library/curses.rst @@ -649,7 +649,7 @@ .. 
note:: - A *character* means a C character (an ASCII code), rather then a Python + A *character* means a C character (an ASCII code), rather than a Python character (a string of length 1). (This note is true whenever the documentation mentions a character.) The built-in :func:`ord` is handy for conveying strings to codes. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 19:33:53 2011 From: python-checkins at python.org (sandro.tosi) Date: Fri, 12 Aug 2011 19:33:53 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_merge_with_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/79764f08ffd4 changeset: 71839:79764f08ffd4 parent: 71835:d403eaec64df parent: 71838:aa6c073c2597 user: Sandro Tosi date: Fri Aug 12 19:31:56 2011 +0200 summary: merge with 3.2 files: Doc/library/curses.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/curses.rst b/Doc/library/curses.rst --- a/Doc/library/curses.rst +++ b/Doc/library/curses.rst @@ -649,7 +649,7 @@ .. note:: - A *character* means a C character (an ASCII code), rather then a Python + A *character* means a C character (an ASCII code), rather than a Python character (a string of length 1). (This note is true whenever the documentation mentions a character.) The built-in :func:`ord` is handy for conveying strings to codes. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 19:38:53 2011 From: python-checkins at python.org (sandro.tosi) Date: Fri, 12 Aug 2011 19:38:53 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_typo?= Message-ID: http://hg.python.org/peps/rev/71dca2ca33ea changeset: 3926:71dca2ca33ea user: Sandro Tosi date: Fri Aug 12 19:38:10 2011 +0200 summary: typo files: pep-3154.txt | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/pep-3154.txt b/pep-3154.txt --- a/pep-3154.txt +++ b/pep-3154.txt @@ -15,7 +15,7 @@ Abstract ======== -Data serialized using the pickle module must be portable accross Python +Data serialized using the pickle module must be portable across Python versions. It should also support the latest language features as well as implementation-specific features. For this reason, the pickle module knows about several protocols (currently numbered from 0 to 3), each of which -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Fri Aug 12 19:56:01 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 19:56:01 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Update_crlf_and?= =?utf8?q?_lfcr_scripts_for_3=2Ex_bytes_semantics_=28=2312032=29=2E?= Message-ID: http://hg.python.org/cpython/rev/47ffb957921d changeset: 71840:47ffb957921d branch: 3.2 parent: 71834:f1859c9d1086 user: ?ric Araujo date: Fri Aug 12 19:40:05 2011 +0200 summary: Update crlf and lfcr scripts for 3.x bytes semantics (#12032). Changes to crlf originally by Victor Stinner for 3.3, copied to lfcr by me. Manually tested. 
files: Tools/scripts/crlf.py | 12 ++++++------ Tools/scripts/lfcr.py | 12 ++++++------ 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/Tools/scripts/crlf.py b/Tools/scripts/crlf.py --- a/Tools/scripts/crlf.py +++ b/Tools/scripts/crlf.py @@ -8,16 +8,16 @@ if os.path.isdir(filename): print(filename, "Directory!") continue - data = open(filename, "rb").read() - if '\0' in data: + with open(filename, "rb") as f: + data = f.read() + if b'\0' in data: print(filename, "Binary!") continue - newdata = data.replace("\r\n", "\n") + newdata = data.replace(b"\r\n", b"\n") if newdata != data: print(filename) - f = open(filename, "wb") - f.write(newdata) - f.close() + with open(filename, "wb") as f: + f.write(newdata) if __name__ == '__main__': main() diff --git a/Tools/scripts/lfcr.py b/Tools/scripts/lfcr.py --- a/Tools/scripts/lfcr.py +++ b/Tools/scripts/lfcr.py @@ -9,16 +9,16 @@ if os.path.isdir(filename): print(filename, "Directory!") continue - data = open(filename, "rb").read() - if '\0' in data: + with open(filename, "rb") as f: + data = f.read() + if b'\0' in data: print(filename, "Binary!") continue - newdata = re.sub("\r?\n", "\r\n", data) + newdata = re.sub(b"\r?\n", b"\r\n", data) if newdata != data: print(filename) - f = open(filename, "wb") - f.write(newdata) - f.close() + with open(filename, "wb") as f: + f.write(newdata) if __name__ == '__main__': main() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 19:56:02 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 19:56:02 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Clean_up_test=5Fshutil=2C_t?= =?utf8?q?o_facilitate_upcoming_improvements_=28=2312721=29=2E?= Message-ID: http://hg.python.org/cpython/rev/d52a1199d3f0 changeset: 71841:d52a1199d3f0 parent: 71835:d403eaec64df user: ?ric Araujo date: Fri Aug 12 19:51:35 2011 +0200 summary: Clean up test_shutil, to facilitate upcoming improvements (#12721). 
The tests now have two convenience functions to wrap os.path.join, open and read or write instead of four or six slightly different functions. The new functions accept a tuple of path segments but not a list anymore, as it makes no sense to use a list here; I have also removed the default value for the contents in write_file, as I find it better to have the contents at the call site. For simple open then read/write calls, I have left the usual idiom (with open + read/write), as it is short and readable enough. I?ve also changed some convoluted cleanup code to just use rmtree, and removed dubious LBYL os.path.exists checks. The tests still pass on my machine, and leave no file in $TMP. test_shutil is not as clean as it could be, but I?ll stop here. Initial patch provided by Hynek Schlawack, in preparation for a new feature with new tests in #12715. files: Lib/test/test_shutil.py | 163 +++++++++++---------------- 1 files changed, 68 insertions(+), 95 deletions(-) diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -59,6 +59,31 @@ os.rename = builtin_rename return wrap +def write_file(path, content, binary=False): + """Write *content* to a file located at *path*. + + If *path* is a tuple instead of a string, os.path.join will be used to + make a path. If *binary* is true, the file will be opened in binary + mode. + """ + if isinstance(path, tuple): + path = os.path.join(*path) + with open(path, 'wb' if binary else 'w') as fp: + fp.write(content) + +def read_file(path, binary=False): + """Return contents from a file located at *path*. + + If *path* is a tuple instead of a string, os.path.join will be used to + make a path. If *binary* is true, the file will be opened in binary + mode. 
+ """ + if isinstance(path, tuple): + path = os.path.join(*path) + with open(path, 'rb' if binary else 'r') as fp: + return fp.read() + + class TestShutil(unittest.TestCase): def setUp(self): @@ -71,19 +96,6 @@ d = self.tempdirs.pop() shutil.rmtree(d, os.name in ('nt', 'cygwin')) - def write_file(self, path, content='xxx'): - """Writes a file in the given path. - - - path can be a string or a sequence. - """ - if isinstance(path, (list, tuple)): - path = os.path.join(*path) - f = open(path, 'w') - try: - f.write(content) - finally: - f.close() def mkdtemp(self): """Create a temporary directory that will be cleaned up. @@ -159,77 +171,42 @@ self.assertRaises(OSError, shutil.rmtree, path) os.remove(path) - def _write_data(self, path, data): - f = open(path, "w") - f.write(data) - f.close() - def test_copytree_simple(self): - - def read_data(path): - f = open(path) - data = f.read() - f.close() - return data - src_dir = tempfile.mkdtemp() dst_dir = os.path.join(tempfile.mkdtemp(), 'destination') - self._write_data(os.path.join(src_dir, 'test.txt'), '123') + self.addCleanup(shutil.rmtree, src_dir) + self.addCleanup(shutil.rmtree, os.path.dirname(dst_dir)) + write_file((src_dir, 'test.txt'), '123') os.mkdir(os.path.join(src_dir, 'test_dir')) - self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456') + write_file((src_dir, 'test_dir', 'test.txt'), '456') - try: - shutil.copytree(src_dir, dst_dir) - self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt'))) - self.assertTrue(os.path.isdir(os.path.join(dst_dir, 'test_dir'))) - self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test_dir', - 'test.txt'))) - actual = read_data(os.path.join(dst_dir, 'test.txt')) - self.assertEqual(actual, '123') - actual = read_data(os.path.join(dst_dir, 'test_dir', 'test.txt')) - self.assertEqual(actual, '456') - finally: - for path in ( - os.path.join(src_dir, 'test.txt'), - os.path.join(dst_dir, 'test.txt'), - os.path.join(src_dir, 'test_dir', 'test.txt'), - 
os.path.join(dst_dir, 'test_dir', 'test.txt'), - ): - if os.path.exists(path): - os.remove(path) - for path in (src_dir, - os.path.dirname(dst_dir) - ): - if os.path.exists(path): - shutil.rmtree(path) + shutil.copytree(src_dir, dst_dir) + self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt'))) + self.assertTrue(os.path.isdir(os.path.join(dst_dir, 'test_dir'))) + self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test_dir', + 'test.txt'))) + actual = read_file((dst_dir, 'test.txt')) + self.assertEqual(actual, '123') + actual = read_file((dst_dir, 'test_dir', 'test.txt')) + self.assertEqual(actual, '456') def test_copytree_with_exclude(self): - - def read_data(path): - f = open(path) - data = f.read() - f.close() - return data - # creating data join = os.path.join exists = os.path.exists src_dir = tempfile.mkdtemp() try: dst_dir = join(tempfile.mkdtemp(), 'destination') - self._write_data(join(src_dir, 'test.txt'), '123') - self._write_data(join(src_dir, 'test.tmp'), '123') + write_file((src_dir, 'test.txt'), '123') + write_file((src_dir, 'test.tmp'), '123') os.mkdir(join(src_dir, 'test_dir')) - self._write_data(join(src_dir, 'test_dir', 'test.txt'), '456') + write_file((src_dir, 'test_dir', 'test.txt'), '456') os.mkdir(join(src_dir, 'test_dir2')) - self._write_data(join(src_dir, 'test_dir2', 'test.txt'), '456') + write_file((src_dir, 'test_dir2', 'test.txt'), '456') os.mkdir(join(src_dir, 'test_dir2', 'subdir')) os.mkdir(join(src_dir, 'test_dir2', 'subdir2')) - self._write_data(join(src_dir, 'test_dir2', 'subdir', 'test.txt'), - '456') - self._write_data(join(src_dir, 'test_dir2', 'subdir2', 'test.py'), - '456') - + write_file((src_dir, 'test_dir2', 'subdir', 'test.txt'), '456') + write_file((src_dir, 'test_dir2', 'subdir2', 'test.py'), '456') # testing glob-like patterns try: @@ -237,21 +214,19 @@ shutil.copytree(src_dir, dst_dir, ignore=patterns) # checking the result: some elements should not be copied self.assertTrue(exists(join(dst_dir, 
'test.txt'))) - self.assertTrue(not exists(join(dst_dir, 'test.tmp'))) - self.assertTrue(not exists(join(dst_dir, 'test_dir2'))) + self.assertFalse(exists(join(dst_dir, 'test.tmp'))) + self.assertFalse(exists(join(dst_dir, 'test_dir2'))) finally: - if os.path.exists(dst_dir): - shutil.rmtree(dst_dir) + shutil.rmtree(dst_dir) try: patterns = shutil.ignore_patterns('*.tmp', 'subdir*') shutil.copytree(src_dir, dst_dir, ignore=patterns) # checking the result: some elements should not be copied - self.assertTrue(not exists(join(dst_dir, 'test.tmp'))) - self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2'))) - self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir'))) + self.assertFalse(exists(join(dst_dir, 'test.tmp'))) + self.assertFalse(exists(join(dst_dir, 'test_dir2', 'subdir2'))) + self.assertFalse(exists(join(dst_dir, 'test_dir2', 'subdir'))) finally: - if os.path.exists(dst_dir): - shutil.rmtree(dst_dir) + shutil.rmtree(dst_dir) # testing callable-style try: @@ -270,13 +245,12 @@ shutil.copytree(src_dir, dst_dir, ignore=_filter) # checking the result: some elements should not be copied - self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2', - 'test.py'))) - self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir'))) + self.assertFalse(exists(join(dst_dir, 'test_dir2', 'subdir2', + 'test.py'))) + self.assertFalse(exists(join(dst_dir, 'test_dir2', 'subdir'))) finally: - if os.path.exists(dst_dir): - shutil.rmtree(dst_dir) + shutil.rmtree(dst_dir) finally: shutil.rmtree(src_dir) shutil.rmtree(os.path.dirname(dst_dir)) @@ -371,9 +345,9 @@ src_dir = self.mkdtemp() dst_dir = os.path.join(self.mkdtemp(), 'destination') - self._write_data(os.path.join(src_dir, 'test.txt'), '123') + write_file((src_dir, 'test.txt'), '123') os.mkdir(os.path.join(src_dir, 'test_dir')) - self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456') + write_file((src_dir, 'test_dir', 'test.txt'), '456') copied = [] def _copy(src, dst): @@ -390,7 
+364,7 @@ dst_dir = os.path.join(self.mkdtemp(), 'destination') os.symlink('IDONTEXIST', os.path.join(src_dir, 'test.txt')) os.mkdir(os.path.join(src_dir, 'test_dir')) - self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456') + write_file((src_dir, 'test_dir', 'test.txt'), '456') self.assertRaises(Error, shutil.copytree, src_dir, dst_dir) # a dangling symlink is ignored with the proper flag @@ -406,7 +380,7 @@ def _copy_file(self, method): fname = 'test.txt' tmpdir = self.mkdtemp() - self.write_file([tmpdir, fname]) + write_file((tmpdir, fname), 'xxx') file1 = os.path.join(tmpdir, fname) tmpdir2 = self.mkdtemp() method(file1, tmpdir2) @@ -442,10 +416,10 @@ def test_make_tarball(self): # creating something to tar tmpdir = self.mkdtemp() - self.write_file([tmpdir, 'file1'], 'xxx') - self.write_file([tmpdir, 'file2'], 'xxx') + write_file((tmpdir, 'file1'), 'xxx') + write_file((tmpdir, 'file2'), 'xxx') os.mkdir(os.path.join(tmpdir, 'sub')) - self.write_file([tmpdir, 'sub', 'file3'], 'xxx') + write_file((tmpdir, 'sub', 'file3'), 'xxx') tmpdir2 = self.mkdtemp() # force shutil to create the directory @@ -492,10 +466,10 @@ tmpdir = self.mkdtemp() dist = os.path.join(tmpdir, 'dist') os.mkdir(dist) - self.write_file([dist, 'file1'], 'xxx') - self.write_file([dist, 'file2'], 'xxx') + write_file((dist, 'file1'), 'xxx') + write_file((dist, 'file2'), 'xxx') os.mkdir(os.path.join(dist, 'sub')) - self.write_file([dist, 'sub', 'file3'], 'xxx') + write_file((dist, 'sub', 'file3'), 'xxx') os.mkdir(os.path.join(dist, 'sub2')) tmpdir2 = self.mkdtemp() base_name = os.path.join(tmpdir2, 'archive') @@ -561,8 +535,8 @@ def test_make_zipfile(self): # creating something to tar tmpdir = self.mkdtemp() - self.write_file([tmpdir, 'file1'], 'xxx') - self.write_file([tmpdir, 'file2'], 'xxx') + write_file((tmpdir, 'file1'), 'xxx') + write_file((tmpdir, 'file2'), 'xxx') tmpdir2 = self.mkdtemp() # force shutil to create the directory @@ -969,8 +943,7 @@ shutil.move(self.src_dir, 
dst_dir) self.assertTrue(os.path.isdir(dst_dir)) finally: - if os.path.exists(dst_dir): - os.rmdir(dst_dir) + os.rmdir(dst_dir) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 19:56:02 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 19:56:02 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_Branch_merge?= Message-ID: http://hg.python.org/cpython/rev/3b6655a72d1c changeset: 71842:3b6655a72d1c parent: 71839:79764f08ffd4 parent: 71841:d52a1199d3f0 user: ?ric Araujo date: Fri Aug 12 19:53:02 2011 +0200 summary: Branch merge files: Lib/test/test_shutil.py | 163 +++++++++++---------------- 1 files changed, 68 insertions(+), 95 deletions(-) diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -59,6 +59,31 @@ os.rename = builtin_rename return wrap +def write_file(path, content, binary=False): + """Write *content* to a file located at *path*. + + If *path* is a tuple instead of a string, os.path.join will be used to + make a path. If *binary* is true, the file will be opened in binary + mode. + """ + if isinstance(path, tuple): + path = os.path.join(*path) + with open(path, 'wb' if binary else 'w') as fp: + fp.write(content) + +def read_file(path, binary=False): + """Return contents from a file located at *path*. + + If *path* is a tuple instead of a string, os.path.join will be used to + make a path. If *binary* is true, the file will be opened in binary + mode. + """ + if isinstance(path, tuple): + path = os.path.join(*path) + with open(path, 'rb' if binary else 'r') as fp: + return fp.read() + + class TestShutil(unittest.TestCase): def setUp(self): @@ -71,19 +96,6 @@ d = self.tempdirs.pop() shutil.rmtree(d, os.name in ('nt', 'cygwin')) - def write_file(self, path, content='xxx'): - """Writes a file in the given path. - - - path can be a string or a sequence. 
- """ - if isinstance(path, (list, tuple)): - path = os.path.join(*path) - f = open(path, 'w') - try: - f.write(content) - finally: - f.close() def mkdtemp(self): """Create a temporary directory that will be cleaned up. @@ -159,77 +171,42 @@ self.assertRaises(OSError, shutil.rmtree, path) os.remove(path) - def _write_data(self, path, data): - f = open(path, "w") - f.write(data) - f.close() - def test_copytree_simple(self): - - def read_data(path): - f = open(path) - data = f.read() - f.close() - return data - src_dir = tempfile.mkdtemp() dst_dir = os.path.join(tempfile.mkdtemp(), 'destination') - self._write_data(os.path.join(src_dir, 'test.txt'), '123') + self.addCleanup(shutil.rmtree, src_dir) + self.addCleanup(shutil.rmtree, os.path.dirname(dst_dir)) + write_file((src_dir, 'test.txt'), '123') os.mkdir(os.path.join(src_dir, 'test_dir')) - self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456') + write_file((src_dir, 'test_dir', 'test.txt'), '456') - try: - shutil.copytree(src_dir, dst_dir) - self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt'))) - self.assertTrue(os.path.isdir(os.path.join(dst_dir, 'test_dir'))) - self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test_dir', - 'test.txt'))) - actual = read_data(os.path.join(dst_dir, 'test.txt')) - self.assertEqual(actual, '123') - actual = read_data(os.path.join(dst_dir, 'test_dir', 'test.txt')) - self.assertEqual(actual, '456') - finally: - for path in ( - os.path.join(src_dir, 'test.txt'), - os.path.join(dst_dir, 'test.txt'), - os.path.join(src_dir, 'test_dir', 'test.txt'), - os.path.join(dst_dir, 'test_dir', 'test.txt'), - ): - if os.path.exists(path): - os.remove(path) - for path in (src_dir, - os.path.dirname(dst_dir) - ): - if os.path.exists(path): - shutil.rmtree(path) + shutil.copytree(src_dir, dst_dir) + self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt'))) + self.assertTrue(os.path.isdir(os.path.join(dst_dir, 'test_dir'))) + 
self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test_dir', + 'test.txt'))) + actual = read_file((dst_dir, 'test.txt')) + self.assertEqual(actual, '123') + actual = read_file((dst_dir, 'test_dir', 'test.txt')) + self.assertEqual(actual, '456') def test_copytree_with_exclude(self): - - def read_data(path): - f = open(path) - data = f.read() - f.close() - return data - # creating data join = os.path.join exists = os.path.exists src_dir = tempfile.mkdtemp() try: dst_dir = join(tempfile.mkdtemp(), 'destination') - self._write_data(join(src_dir, 'test.txt'), '123') - self._write_data(join(src_dir, 'test.tmp'), '123') + write_file((src_dir, 'test.txt'), '123') + write_file((src_dir, 'test.tmp'), '123') os.mkdir(join(src_dir, 'test_dir')) - self._write_data(join(src_dir, 'test_dir', 'test.txt'), '456') + write_file((src_dir, 'test_dir', 'test.txt'), '456') os.mkdir(join(src_dir, 'test_dir2')) - self._write_data(join(src_dir, 'test_dir2', 'test.txt'), '456') + write_file((src_dir, 'test_dir2', 'test.txt'), '456') os.mkdir(join(src_dir, 'test_dir2', 'subdir')) os.mkdir(join(src_dir, 'test_dir2', 'subdir2')) - self._write_data(join(src_dir, 'test_dir2', 'subdir', 'test.txt'), - '456') - self._write_data(join(src_dir, 'test_dir2', 'subdir2', 'test.py'), - '456') - + write_file((src_dir, 'test_dir2', 'subdir', 'test.txt'), '456') + write_file((src_dir, 'test_dir2', 'subdir2', 'test.py'), '456') # testing glob-like patterns try: @@ -237,21 +214,19 @@ shutil.copytree(src_dir, dst_dir, ignore=patterns) # checking the result: some elements should not be copied self.assertTrue(exists(join(dst_dir, 'test.txt'))) - self.assertTrue(not exists(join(dst_dir, 'test.tmp'))) - self.assertTrue(not exists(join(dst_dir, 'test_dir2'))) + self.assertFalse(exists(join(dst_dir, 'test.tmp'))) + self.assertFalse(exists(join(dst_dir, 'test_dir2'))) finally: - if os.path.exists(dst_dir): - shutil.rmtree(dst_dir) + shutil.rmtree(dst_dir) try: patterns = shutil.ignore_patterns('*.tmp', 'subdir*') 
shutil.copytree(src_dir, dst_dir, ignore=patterns) # checking the result: some elements should not be copied - self.assertTrue(not exists(join(dst_dir, 'test.tmp'))) - self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2'))) - self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir'))) + self.assertFalse(exists(join(dst_dir, 'test.tmp'))) + self.assertFalse(exists(join(dst_dir, 'test_dir2', 'subdir2'))) + self.assertFalse(exists(join(dst_dir, 'test_dir2', 'subdir'))) finally: - if os.path.exists(dst_dir): - shutil.rmtree(dst_dir) + shutil.rmtree(dst_dir) # testing callable-style try: @@ -270,13 +245,12 @@ shutil.copytree(src_dir, dst_dir, ignore=_filter) # checking the result: some elements should not be copied - self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2', - 'test.py'))) - self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir'))) + self.assertFalse(exists(join(dst_dir, 'test_dir2', 'subdir2', + 'test.py'))) + self.assertFalse(exists(join(dst_dir, 'test_dir2', 'subdir'))) finally: - if os.path.exists(dst_dir): - shutil.rmtree(dst_dir) + shutil.rmtree(dst_dir) finally: shutil.rmtree(src_dir) shutil.rmtree(os.path.dirname(dst_dir)) @@ -371,9 +345,9 @@ src_dir = self.mkdtemp() dst_dir = os.path.join(self.mkdtemp(), 'destination') - self._write_data(os.path.join(src_dir, 'test.txt'), '123') + write_file((src_dir, 'test.txt'), '123') os.mkdir(os.path.join(src_dir, 'test_dir')) - self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456') + write_file((src_dir, 'test_dir', 'test.txt'), '456') copied = [] def _copy(src, dst): @@ -390,7 +364,7 @@ dst_dir = os.path.join(self.mkdtemp(), 'destination') os.symlink('IDONTEXIST', os.path.join(src_dir, 'test.txt')) os.mkdir(os.path.join(src_dir, 'test_dir')) - self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456') + write_file((src_dir, 'test_dir', 'test.txt'), '456') self.assertRaises(Error, shutil.copytree, src_dir, dst_dir) # a dangling symlink is ignored 
with the proper flag @@ -406,7 +380,7 @@ def _copy_file(self, method): fname = 'test.txt' tmpdir = self.mkdtemp() - self.write_file([tmpdir, fname]) + write_file((tmpdir, fname), 'xxx') file1 = os.path.join(tmpdir, fname) tmpdir2 = self.mkdtemp() method(file1, tmpdir2) @@ -442,10 +416,10 @@ def test_make_tarball(self): # creating something to tar tmpdir = self.mkdtemp() - self.write_file([tmpdir, 'file1'], 'xxx') - self.write_file([tmpdir, 'file2'], 'xxx') + write_file((tmpdir, 'file1'), 'xxx') + write_file((tmpdir, 'file2'), 'xxx') os.mkdir(os.path.join(tmpdir, 'sub')) - self.write_file([tmpdir, 'sub', 'file3'], 'xxx') + write_file((tmpdir, 'sub', 'file3'), 'xxx') tmpdir2 = self.mkdtemp() # force shutil to create the directory @@ -492,10 +466,10 @@ tmpdir = self.mkdtemp() dist = os.path.join(tmpdir, 'dist') os.mkdir(dist) - self.write_file([dist, 'file1'], 'xxx') - self.write_file([dist, 'file2'], 'xxx') + write_file((dist, 'file1'), 'xxx') + write_file((dist, 'file2'), 'xxx') os.mkdir(os.path.join(dist, 'sub')) - self.write_file([dist, 'sub', 'file3'], 'xxx') + write_file((dist, 'sub', 'file3'), 'xxx') os.mkdir(os.path.join(dist, 'sub2')) tmpdir2 = self.mkdtemp() base_name = os.path.join(tmpdir2, 'archive') @@ -561,8 +535,8 @@ def test_make_zipfile(self): # creating something to tar tmpdir = self.mkdtemp() - self.write_file([tmpdir, 'file1'], 'xxx') - self.write_file([tmpdir, 'file2'], 'xxx') + write_file((tmpdir, 'file1'), 'xxx') + write_file((tmpdir, 'file2'), 'xxx') tmpdir2 = self.mkdtemp() # force shutil to create the directory @@ -969,8 +943,7 @@ shutil.move(self.src_dir, dst_dir) self.assertTrue(os.path.isdir(dst_dir)) finally: - if os.path.exists(dst_dir): - os.rmdir(dst_dir) + os.rmdir(dst_dir) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 19:56:03 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 19:56:03 +0200 Subject: [Python-checkins] 
=?utf8?b?Y3B5dGhvbiAobWVyZ2UgMy4yIC0+IDMuMik6?= =?utf8?q?_Branch_merge?= Message-ID: http://hg.python.org/cpython/rev/d04a31a43a15 changeset: 71843:d04a31a43a15 branch: 3.2 parent: 71838:aa6c073c2597 parent: 71840:47ffb957921d user: ?ric Araujo date: Fri Aug 12 19:52:43 2011 +0200 summary: Branch merge files: Tools/scripts/crlf.py | 12 ++++++------ Tools/scripts/lfcr.py | 12 ++++++------ 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/Tools/scripts/crlf.py b/Tools/scripts/crlf.py --- a/Tools/scripts/crlf.py +++ b/Tools/scripts/crlf.py @@ -8,16 +8,16 @@ if os.path.isdir(filename): print(filename, "Directory!") continue - data = open(filename, "rb").read() - if '\0' in data: + with open(filename, "rb") as f: + data = f.read() + if b'\0' in data: print(filename, "Binary!") continue - newdata = data.replace("\r\n", "\n") + newdata = data.replace(b"\r\n", b"\n") if newdata != data: print(filename) - f = open(filename, "wb") - f.write(newdata) - f.close() + with open(filename, "wb") as f: + f.write(newdata) if __name__ == '__main__': main() diff --git a/Tools/scripts/lfcr.py b/Tools/scripts/lfcr.py --- a/Tools/scripts/lfcr.py +++ b/Tools/scripts/lfcr.py @@ -9,16 +9,16 @@ if os.path.isdir(filename): print(filename, "Directory!") continue - data = open(filename, "rb").read() - if '\0' in data: + with open(filename, "rb") as f: + data = f.read() + if b'\0' in data: print(filename, "Binary!") continue - newdata = re.sub("\r?\n", "\r\n", data) + newdata = re.sub(b"\r?\n", b"\r\n", data) if newdata != data: print(filename) - f = open(filename, "wb") - f.write(newdata) - f.close() + with open(filename, "wb") as f: + f.write(newdata) if __name__ == '__main__': main() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 12 19:56:04 2011 From: python-checkins at python.org (eric.araujo) Date: Fri, 12 Aug 2011 19:56:04 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_3=2E2?= 
Message-ID: http://hg.python.org/cpython/rev/363cdf7af849 changeset: 71844:363cdf7af849 parent: 71842:3b6655a72d1c parent: 71843:d04a31a43a15 user: ?ric Araujo date: Fri Aug 12 19:53:13 2011 +0200 summary: Merge 3.2 files: Tools/scripts/lfcr.py | 12 ++++++------ 1 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Tools/scripts/lfcr.py b/Tools/scripts/lfcr.py --- a/Tools/scripts/lfcr.py +++ b/Tools/scripts/lfcr.py @@ -9,16 +9,16 @@ if os.path.isdir(filename): print(filename, "Directory!") continue - data = open(filename, "rb").read() - if '\0' in data: + with open(filename, "rb") as f: + data = f.read() + if b'\0' in data: print(filename, "Binary!") continue - newdata = re.sub("\r?\n", "\r\n", data) + newdata = re.sub(b"\r?\n", b"\r\n", data) if newdata != data: print(filename) - f = open(filename, "wb") - f.write(newdata) - f.close() + with open(filename, "wb") as f: + f.write(newdata) if __name__ == '__main__': main() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 13 00:42:09 2011 From: python-checkins at python.org (sandro.tosi) Date: Sat, 13 Aug 2011 00:42:09 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_let_PySequence?= =?utf8?q?=5FCheck_me_a_link=3B_thanks_to_tomo_cocoa_from_docs=40?= Message-ID: http://hg.python.org/cpython/rev/70ace8ffa4ac changeset: 71845:70ace8ffa4ac branch: 2.7 parent: 71837:7d9024b7a001 user: Sandro Tosi date: Sat Aug 13 00:39:29 2011 +0200 summary: let PySequence_Check me a link; thanks to tomo cocoa from docs@ files: Doc/c-api/sequence.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/c-api/sequence.rst b/Doc/c-api/sequence.rst --- a/Doc/c-api/sequence.rst +++ b/Doc/c-api/sequence.rst @@ -199,7 +199,7 @@ Return the *i*\ th element of *o* or *NULL* on failure. 
Macro form of :cfunc:`PySequence_GetItem` but without checking that - :cfunc:`PySequence_Check(o)` is true and without adjustment for negative + :cfunc:`PySequence_Check` on *o* is true and without adjustment for negative indices. .. versionadded:: 2.3 -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 13 00:42:10 2011 From: python-checkins at python.org (sandro.tosi) Date: Sat, 13 Aug 2011 00:42:10 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_let_PySequence?= =?utf8?q?=5FCheck_me_a_link=3B_thanks_to_tomo_cocoa_from_docs=40?= Message-ID: http://hg.python.org/cpython/rev/5f690bf172b9 changeset: 71846:5f690bf172b9 branch: 3.2 parent: 71843:d04a31a43a15 user: Sandro Tosi date: Sat Aug 13 00:39:46 2011 +0200 summary: let PySequence_Check me a link; thanks to tomo cocoa from docs@ files: Doc/c-api/sequence.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/c-api/sequence.rst b/Doc/c-api/sequence.rst --- a/Doc/c-api/sequence.rst +++ b/Doc/c-api/sequence.rst @@ -149,7 +149,7 @@ Return the *i*\ th element of *o* or *NULL* on failure. Macro form of :c:func:`PySequence_GetItem` but without checking that - :c:func:`PySequence_Check(o)` is true and without adjustment for negative + :c:func:`PySequence_Check` on *o* is true and without adjustment for negative indices. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 13 00:42:11 2011 From: python-checkins at python.org (sandro.tosi) Date: Sat, 13 Aug 2011 00:42:11 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_merge_with_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/0937a0986b66 changeset: 71847:0937a0986b66 parent: 71844:363cdf7af849 parent: 71846:5f690bf172b9 user: Sandro Tosi date: Sat Aug 13 00:40:08 2011 +0200 summary: merge with 3.2 files: Doc/c-api/sequence.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/c-api/sequence.rst b/Doc/c-api/sequence.rst --- a/Doc/c-api/sequence.rst +++ b/Doc/c-api/sequence.rst @@ -149,7 +149,7 @@ Return the *i*\ th element of *o* or *NULL* on failure. Macro form of :c:func:`PySequence_GetItem` but without checking that - :c:func:`PySequence_Check(o)` is true and without adjustment for negative + :c:func:`PySequence_Check` on *o* is true and without adjustment for negative indices. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 13 05:18:30 2011 From: python-checkins at python.org (benjamin.peterson) Date: Sat, 13 Aug 2011 05:18:30 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_in_narrow_build?= =?utf8?q?s=2C_make_sure_to_test_codepoints_as_identifier_characters_=28cl?= =?utf8?q?oses?= Message-ID: http://hg.python.org/cpython/rev/787ed1a7aba8 changeset: 71848:787ed1a7aba8 branch: 3.2 parent: 71846:5f690bf172b9 user: Benjamin Peterson date: Fri Aug 12 22:17:18 2011 -0500 summary: in narrow builds, make sure to test codepoints as identifier characters (closes #12732) This fixes the use of Unicode identifiers outside the BMP in narrow builds. 
files: Lib/test/test_pep3131.py | 3 ++ Lib/test/test_unicode.py | 1 + Misc/NEWS | 3 ++ Objects/unicodeobject.c | 31 ++++++++++++++++++++------- 4 files changed, 30 insertions(+), 8 deletions(-) diff --git a/Lib/test/test_pep3131.py b/Lib/test/test_pep3131.py --- a/Lib/test/test_pep3131.py +++ b/Lib/test/test_pep3131.py @@ -8,9 +8,12 @@ ? = 1 ? = 2 # this is a compatibility character ? = 3 + ??????? = 4 self.assertEqual(getattr(T, "\xe4"), 1) self.assertEqual(getattr(T, "\u03bc"), 2) self.assertEqual(getattr(T, '\u87d2'), 3) + v = getattr(T, "\U0001d518\U0001d52b\U0001d526\U0001d520\U0001d52c\U0001d521\U0001d522") + self.assertEqual(v, 4) def test_invalid(self): try: diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py --- a/Lib/test/test_unicode.py +++ b/Lib/test/test_unicode.py @@ -404,6 +404,7 @@ self.assertTrue("bc".isidentifier()) self.assertTrue("b_".isidentifier()) self.assertTrue("?".isidentifier()) + self.assertTrue("???????".isidentifier()) self.assertFalse(" ".isidentifier()) self.assertFalse("[".isidentifier()) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Core and Builtins ----------------- +- Issue #12732: In narrow unicode builds, allow Unicode identifiers which fall + outside the BMP. + - Issue #11603: Fix a crash when __str__ is rebound as __repr__. Patch by Andreas St?hrk. 
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -7972,14 +7972,30 @@ return PyBool_FromLong(1); } +static Py_UCS4 +decode_ucs4(const Py_UNICODE *s, Py_ssize_t *i, Py_ssize_t size) +{ + Py_UCS4 ch; + assert(*i < size); + ch = s[(*i)++]; +#ifndef Py_UNICODE_WIDE + if ((ch & 0xfffffc00) == 0xd800 && + *i < size + && (s[*i] & 0xFFFFFC00) == 0xDC00) + ch = ((Py_UCS4)ch << 10UL) + (Py_UCS4)(s[(*i)++]) - 0x35fdc00; +#endif + return ch; +} + int PyUnicode_IsIdentifier(PyObject *self) { - register const Py_UNICODE *p = PyUnicode_AS_UNICODE((PyUnicodeObject*)self); - register const Py_UNICODE *e; + Py_ssize_t i = 0, size = PyUnicode_GET_SIZE(self); + Py_UCS4 first; + const Py_UNICODE *p = PyUnicode_AS_UNICODE((PyUnicodeObject*)self); /* Special case for empty strings */ - if (PyUnicode_GET_SIZE(self) == 0) + if (!size) return 0; /* PEP 3131 says that the first character must be in @@ -7990,14 +8006,13 @@ definition of XID_Start and XID_Continue, it is sufficient to check just for these, except that _ must be allowed as starting an identifier. 
*/ - if (!_PyUnicode_IsXidStart(*p) && *p != 0x5F /* LOW LINE */) + first = decode_ucs4(p, &i, size); + if (!_PyUnicode_IsXidStart(first) && first != 0x5F /* LOW LINE */) return 0; - e = p + PyUnicode_GET_SIZE(self); - for (p++; p < e; p++) { - if (!_PyUnicode_IsXidContinue(*p)) + while (i < size) + if (!_PyUnicode_IsXidContinue(decode_ucs4(p, &i, size))) return 0; - } return 1; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 13 05:18:31 2011 From: python-checkins at python.org (benjamin.peterson) Date: Sat, 13 Aug 2011 05:18:31 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?b?OiBtZXJnZSAzLjIgKCMxMjczMik=?= Message-ID: http://hg.python.org/cpython/rev/5af15f018e20 changeset: 71849:5af15f018e20 parent: 71847:0937a0986b66 parent: 71848:787ed1a7aba8 user: Benjamin Peterson date: Fri Aug 12 22:18:19 2011 -0500 summary: merge 3.2 (#12732) files: Lib/test/test_pep3131.py | 3 ++ Lib/test/test_unicode.py | 1 + Misc/NEWS | 3 ++ Objects/unicodeobject.c | 31 ++++++++++++++++++++------- 4 files changed, 30 insertions(+), 8 deletions(-) diff --git a/Lib/test/test_pep3131.py b/Lib/test/test_pep3131.py --- a/Lib/test/test_pep3131.py +++ b/Lib/test/test_pep3131.py @@ -8,9 +8,12 @@ ? = 1 ? = 2 # this is a compatibility character ? = 3 + ??????? 
= 4 self.assertEqual(getattr(T, "\xe4"), 1) self.assertEqual(getattr(T, "\u03bc"), 2) self.assertEqual(getattr(T, '\u87d2'), 3) + v = getattr(T, "\U0001d518\U0001d52b\U0001d526\U0001d520\U0001d52c\U0001d521\U0001d522") + self.assertEqual(v, 4) def test_invalid(self): try: diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py --- a/Lib/test/test_unicode.py +++ b/Lib/test/test_unicode.py @@ -404,6 +404,7 @@ self.assertTrue("bc".isidentifier()) self.assertTrue("b_".isidentifier()) self.assertTrue("?".isidentifier()) + self.assertTrue("???????".isidentifier()) self.assertFalse(" ".isidentifier()) self.assertFalse("[".isidentifier()) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Core and Builtins ----------------- +- Issue #12732: In narrow unicode builds, allow Unicode identifiers which fall + outside the BMP. + - Issue #12575: Validate user-generated AST before it is compiled. - Make type(None), type(Ellipsis), and type(NotImplemented) callable. 
They diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -8044,14 +8044,30 @@ return PyBool_FromLong(1); } +static Py_UCS4 +decode_ucs4(const Py_UNICODE *s, Py_ssize_t *i, Py_ssize_t size) +{ + Py_UCS4 ch; + assert(*i < size); + ch = s[(*i)++]; +#ifndef Py_UNICODE_WIDE + if ((ch & 0xfffffc00) == 0xd800 && + *i < size + && (s[*i] & 0xFFFFFC00) == 0xDC00) + ch = ((Py_UCS4)ch << 10UL) + (Py_UCS4)(s[(*i)++]) - 0x35fdc00; +#endif + return ch; +} + int PyUnicode_IsIdentifier(PyObject *self) { - register const Py_UNICODE *p = PyUnicode_AS_UNICODE((PyUnicodeObject*)self); - register const Py_UNICODE *e; + Py_ssize_t i = 0, size = PyUnicode_GET_SIZE(self); + Py_UCS4 first; + const Py_UNICODE *p = PyUnicode_AS_UNICODE((PyUnicodeObject*)self); /* Special case for empty strings */ - if (PyUnicode_GET_SIZE(self) == 0) + if (!size) return 0; /* PEP 3131 says that the first character must be in @@ -8062,14 +8078,13 @@ definition of XID_Start and XID_Continue, it is sufficient to check just for these, except that _ must be allowed as starting an identifier. 
*/ - if (!_PyUnicode_IsXidStart(*p) && *p != 0x5F /* LOW LINE */) + first = decode_ucs4(p, &i, size); + if (!_PyUnicode_IsXidStart(first) && first != 0x5F /* LOW LINE */) return 0; - e = p + PyUnicode_GET_SIZE(self); - for (p++; p < e; p++) { - if (!_PyUnicode_IsXidContinue(*p)) + while (i < size) + if (!_PyUnicode_IsXidContinue(decode_ucs4(p, &i, size))) return 0; - } return 1; } -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Sat Aug 13 05:24:58 2011 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Sat, 13 Aug 2011 05:24:58 +0200 Subject: [Python-checkins] Daily reference leaks (0937a0986b66): sum=0 Message-ID: results for 0937a0986b66 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogPvKl7d', '-x'] From python-checkins at python.org Sat Aug 13 06:10:59 2011 From: python-checkins at python.org (benjamin.peterson) Date: Sat, 13 Aug 2011 06:10:59 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_include_header_with_PyAST?= =?utf8?q?=5FValidate?= Message-ID: http://hg.python.org/cpython/rev/381a6def7d5f changeset: 71850:381a6def7d5f user: Benjamin Peterson date: Fri Aug 12 23:10:50 2011 -0500 summary: include header with PyAST_Validate files: Python/bltinmodule.c | 3 +++ 1 files changed, 3 insertions(+), 0 deletions(-) diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -6,6 +6,9 @@ #include "node.h" #include "code.h" +#include "asdl.h" +#include "ast.h" + #include #ifdef HAVE_LANGINFO_H -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 13 06:36:03 2011 From: python-checkins at python.org (benjamin.peterson) Date: Sat, 13 Aug 2011 06:36:03 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_normalization_i?= =?utf8?q?s_different_between_unicode_builds=2C_so_use_a_new_non-BMP_char?= 
Message-ID: http://hg.python.org/cpython/rev/3e9c882808b3 changeset: 71851:3e9c882808b3 branch: 3.2 parent: 71848:787ed1a7aba8 user: Benjamin Peterson date: Fri Aug 12 23:35:34 2011 -0500 summary: normalization is different between unicode builds, so use a new non-BMP char and add normalization test files: Lib/test/test_pep3131.py | 15 ++++++++++++--- 1 files changed, 12 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_pep3131.py b/Lib/test/test_pep3131.py --- a/Lib/test/test_pep3131.py +++ b/Lib/test/test_pep3131.py @@ -1,4 +1,5 @@ import unittest +import sys from test import support class PEP3131Test(unittest.TestCase): @@ -8,12 +9,20 @@ ? = 1 ? = 2 # this is a compatibility character ? = 3 - ??????? = 4 + x? = 4 self.assertEqual(getattr(T, "\xe4"), 1) self.assertEqual(getattr(T, "\u03bc"), 2) self.assertEqual(getattr(T, '\u87d2'), 3) - v = getattr(T, "\U0001d518\U0001d52b\U0001d526\U0001d520\U0001d52c\U0001d521\U0001d522") - self.assertEqual(v, 4) + self.assertEqual(getattr(T, 'x\U000E0100'), 4) + + def test_non_bmp_normalized(self): + ??????? = 1 + # On wide builds, this is normalized, but on narrow ones it is not. See + # #12746. 
+ try: + self.assertIn("???????", dir()) + except AssertionError: + raise unittest.case._ExpectedFailure(sys.exc_info()) def test_invalid(self): try: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 13 06:36:05 2011 From: python-checkins at python.org (benjamin.peterson) Date: Sat, 13 Aug 2011 06:36:05 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_merge_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/9b852ceb8858 changeset: 71852:9b852ceb8858 parent: 71850:381a6def7d5f parent: 71851:3e9c882808b3 user: Benjamin Peterson date: Fri Aug 12 23:35:46 2011 -0500 summary: merge 3.2 files: Lib/test/test_pep3131.py | 15 ++++++++++++--- 1 files changed, 12 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_pep3131.py b/Lib/test/test_pep3131.py --- a/Lib/test/test_pep3131.py +++ b/Lib/test/test_pep3131.py @@ -1,4 +1,5 @@ import unittest +import sys from test import support class PEP3131Test(unittest.TestCase): @@ -8,12 +9,20 @@ ? = 1 ? = 2 # this is a compatibility character ? = 3 - ??????? = 4 + x? = 4 self.assertEqual(getattr(T, "\xe4"), 1) self.assertEqual(getattr(T, "\u03bc"), 2) self.assertEqual(getattr(T, '\u87d2'), 3) - v = getattr(T, "\U0001d518\U0001d52b\U0001d526\U0001d520\U0001d52c\U0001d521\U0001d522") - self.assertEqual(v, 4) + self.assertEqual(getattr(T, 'x\U000E0100'), 4) + + def test_non_bmp_normalized(self): + ??????? = 1 + # On wide builds, this is normalized, but on narrow ones it is not. See + # #12746. 
+ try: + self.assertIn("???????", dir()) + except AssertionError: + raise unittest.case._ExpectedFailure(sys.exc_info()) def test_invalid(self): try: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 13 07:33:39 2011 From: python-checkins at python.org (benjamin.peterson) Date: Sat, 13 Aug 2011 07:33:39 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_tokenize_is_jus?= =?utf8?q?t_broken_on_test=5Fpep3131=2Epy?= Message-ID: http://hg.python.org/cpython/rev/c13abed5d764 changeset: 71853:c13abed5d764 branch: 3.2 parent: 71851:3e9c882808b3 user: Benjamin Peterson date: Sat Aug 13 00:33:21 2011 -0500 summary: tokenize is just broken on test_pep3131.py files: Lib/test/test_tokenize.py | 3 +++ 1 files changed, 3 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py --- a/Lib/test/test_tokenize.py +++ b/Lib/test/test_tokenize.py @@ -520,6 +520,9 @@ >>> tempdir = os.path.dirname(f) or os.curdir >>> testfiles = glob.glob(os.path.join(tempdir, "test*.py")) +tokenize is broken on test_pep3131.py because regular expressions are broken on +the obscure unicode identifiers in it. *sigh* + >>> testfiles.remove(os.path.join(tempdir, "test_pep3131.py")) >>> if not support.is_resource_enabled("cpu"): ... testfiles = random.sample(testfiles, 10) ... 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 13 07:33:39 2011 From: python-checkins at python.org (benjamin.peterson) Date: Sat, 13 Aug 2011 07:33:39 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_merge_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/6ff43efb8d55 changeset: 71854:6ff43efb8d55 parent: 71852:9b852ceb8858 parent: 71853:c13abed5d764 user: Benjamin Peterson date: Sat Aug 13 00:33:29 2011 -0500 summary: merge 3.2 files: Lib/test/test_tokenize.py | 3 +++ 1 files changed, 3 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py --- a/Lib/test/test_tokenize.py +++ b/Lib/test/test_tokenize.py @@ -520,6 +520,9 @@ >>> tempdir = os.path.dirname(f) or os.curdir >>> testfiles = glob.glob(os.path.join(tempdir, "test*.py")) +tokenize is broken on test_pep3131.py because regular expressions are broken on +the obscure unicode identifiers in it. *sigh* + >>> testfiles.remove(os.path.join(tempdir, "test_pep3131.py")) >>> if not support.is_resource_enabled("cpu"): ... testfiles = random.sample(testfiles, 10) ... -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 13 09:50:30 2011 From: python-checkins at python.org (ezio.melotti) Date: Sat, 13 Aug 2011 09:50:30 +0200 (CEST) Subject: [Python-checkins] r88881 - in tracker/instances/python-dev: extensions/jnosy.py html/issue.item.html html/issue.item.js html/style.css html/user.devs.html html/user.experts.html Message-ID: <3RbZxy5bPSzN5Z@mail.python.org> Author: ezio.melotti Date: Sat Aug 13 09:50:30 2011 New Revision: 88881 Log: #417: add an autocomplete for the nosy list. 
Added: tracker/instances/python-dev/extensions/jnosy.py tracker/instances/python-dev/html/user.devs.html tracker/instances/python-dev/html/user.experts.html Modified: tracker/instances/python-dev/html/issue.item.html tracker/instances/python-dev/html/issue.item.js tracker/instances/python-dev/html/style.css Added: tracker/instances/python-dev/extensions/jnosy.py ============================================================================== --- (empty file) +++ tracker/instances/python-dev/extensions/jnosy.py Sat Aug 13 09:50:30 2011 @@ -0,0 +1,84 @@ +""" +This module provides two helper functions used by the Javascript autocomplete +of the nosy list: + 1) a simple state machine to parse the tables of the + experts index and turn them in a JSON object; + 2) a function to get the list of developers as a JSON object; +""" + +import urllib +try: + import json +except ImportError: + import simplejson as json + +url = 'http://hg.python.org/devguide/raw-file/default/experts.rst' + +# possible states +no_table = 0 # not parsing a table +table_header = 1 # parsing the header +table_content = 2 # parsing the content +table_end = 3 # reached the end of the table + +def experts_as_json(): + """ + Parse the tables of the experts index and turn them into a JSON object. + """ + data = {} + table_state = no_table + + try: + page = urllib.urlopen(url) + except Exception: + # if something goes wrong just return an empty JSON object + return '{}' + + for line in page: + columns = [column.strip() for column in line.split(' ', 1)] + # all the tables have 2 columns (some entries might not have experts, + # so we just skip them) + if len(columns) != 2: + continue + first, second = columns + # check if we found a table separator + if set(first) == set(second) == set('='): + table_state += 1 + if table_state == table_end: + table_state = no_table + continue + if table_state == table_header: + # create a dict for the category (e.g. 
'Modules', 'Interest areas') + category = first + data[category] = {} + if table_state == table_content: + # add to the category dict the entries for that category + # (e.g.module names) and the list of experts + # if the entry is empty the names belong to the previous entry + entry = first or entry + names = (name.strip(' *') for name in second.split(',')) + names = ','.join(name for name in names if '(inactive)' not in name) + if not first: + data[category][entry] += names + else: + data[category][entry] = names + return json.dumps(data, separators=(',',':')) + + +def devs_as_json(cls): + """ + Generate a JSON object that contains the username and realname of all + the committers. + """ + users = [] + for user in cls.filter(None, {'iscommitter': 1}): + username = user.username.plain() + realname = user.realname.plain(unchecked=1) + if not realname: + continue + users.append([username, realname]) + return json.dumps(users, separators=(',',':')) + + +def init(instance): + instance.registerUtil('experts_as_json', experts_as_json) + instance.registerUtil('devs_as_json', devs_as_json) Modified: tracker/instances/python-dev/html/issue.item.html ============================================================================== --- tracker/instances/python-dev/html/issue.item.html (original) +++ tracker/instances/python-dev/html/issue.item.html Sat Aug 13 09:50:30 2011 @@ -11,8 +11,10 @@ - + + + Modified: tracker/instances/python-dev/html/issue.item.js ============================================================================== --- tracker/instances/python-dev/html/issue.item.js (original) +++ tracker/instances/python-dev/html/issue.item.js Sat Aug 13 09:50:30 2011 @@ -62,3 +62,173 @@ } }); }) + + +$(document).ready(function() { + /* Add an autocomplete to the nosy list that searches the term in two lists: + 1) the list of developers (both the user and the real name); + 2) the list of experts in the devguide; + See also the "categories" and "multiple values" examples at + 
http://jqueryui.com/demos/autocomplete/. */ + + if ($("input[name=nosy]").length == 0) { + // if we can't find the nosy , the user can't edit the nosy + // so there's no need to load the autocomplete + return; + } + + // create a custom widget to group the entries in categories + $.widget("custom.catcomplete", $.ui.autocomplete, { + _renderMenu: function(ul, items) { + var self = this, current_category = ""; + // loop through the items, adding a
  • when a new category is + // found, and then render the item in the
      + $.each(items, function(index, item) { + if (item.category != current_category) { + ul.append("
    • " + item.category + "
    • "); + current_category = item.category; + } + self._renderItem(ul, item); + }); + } + }); + + function split(val) { + return val.split(/\s*,\s*/); + } + function extract_last(term) { + return split(term).pop(); + } + function unix_time() { + return Math.floor(new Date().getTime() / 1000); + } + function is_expired(time_str) { + // check if the cached file is older than 1 day + return ((unix_time() - parseInt(time_str)) > 24*60*60); + } + + // this will be called once we have retrieved the data + function add_autocomplete(data) { + $("input[name=nosy]") + // don't navigate away from the field on tab when selecting an item + .bind("keydown", function(event) { + if (event.keyCode === $.ui.keyCode.TAB && + $(this).data("autocomplete").menu.active) { + event.preventDefault(); + } + }) + .catcomplete({ + minLength: 2, // this doesn't seem to work + delay: 0, + source: function(request, response) { + // delegate back to autocomplete, but extract the last term + response($.ui.autocomplete.filter( + data, extract_last(request.term))); + }, + focus: function() { + // prevent value inserted on focus + return false; + }, + select: function(event, ui) { + var usernames = split(this.value); + // remove the current input + usernames.pop(); + // add the selected item + $.each(split(ui.item.value), function(i, username) { + // check if any of the usernames are already there + if ($.inArray(username, usernames) == -1) + usernames.push(username); + }); + // add placeholder to get the comma at the end + usernames.push(""); + this.value = usernames.join(",") ; + return false; + } + }); + } + + + // check if we have HTML5 storage available + try { + var supports_html5_storage = !!localStorage.getItem; + } catch(e) { + var supports_html5_storage = false; + } + + // this object receives the entries for the devs and experts and + // when it has both it calls add_autocomplete + var data = { + devs: null, + experts: null, + add: function(data, type) { + // type is either 'devs' or 
'experts' + this[type] = data; + if (this.devs && this.experts) + add_autocomplete(this.devs.concat(this.experts)) + } + }; + + /* Note: instead of using a nested structure like: + {"Platform": {"plat1": "name1,name2", "plat2": "name3,name4", ...}, + "Module": {"mod1": "name1,name2", "mod2": "name3,name4", ...}, + ...} + (i.e. the same format sent by the server), we have to change it and + repeat the category for each entry, because the autocomplete wants a + flat structure like: + [{label: "plat1: name1,name2", value: "name1,name2", category: "Platform"}, + {label: "plat2: name3,name4", value: "name3,name4", category: "Platform"}, + {label: "mod1: name1,name2", value: "name1,name2", category: "Module"}, + {label: "mod2: name3,name4", value: "name3,name4", category: "Module"}, + ...]. + Passing a nested structure to ui.autocomplete.filter() and attempt + further parsing in _renderMenu doesn't seem to work. + */ + function get_json(file, callback) { + // Get the JSON from either the HTML5 storage or the server. + // file is either 'devs' or 'experts', + // the callback is called once the json is retrieved + var json; + if (supports_html5_storage && + ((json = localStorage[file]) != null) && + !is_expired(localStorage[file+'time'])) { + // if we have HTML5 storage and already cached the JSON, use it + callback(JSON.parse(json), file); + } + else { + // if we don't have HTML5 storage or the cache is empty, request + // the JSON to the server + $.getJSON('user?@template='+file, function(rawdata) { + var objects = []; // array of objs with label, value, category + if (file == 'devs') { + // save devs as 'Name Surname (user.name)' + $.each(rawdata, function(index, names) { + objects.push({label: names[1] + ' (' + names[0] + ')', + value: names[0], category: 'Developer'}); + }); + } + else { + // save experts as e.g. 
'modname: user1,user2' + $.each(rawdata, function(category, entries) { + $.each(entries, function(entry, names) { + objects.push({label: entry + ': ' + names, + value: names, category: category}); + }); + }); + } + // cache the objects if we have HTML5 storage + if (supports_html5_storage) { + localStorage[file] = JSON.stringify(objects); + localStorage[file+'time'] = unix_time(); + } + callback(objects, file); + }); + } + } + + // request the JSON. This will get it from the HTML5 storage if it's there + // or request it to the server if it's not, The JSON will be passed to the + // data object, that will wait to get both the files before calling the + // add_autocomplete function. + get_json('experts', data.add); + get_json('devs', data.add); +}); Modified: tracker/instances/python-dev/html/style.css ============================================================================== --- tracker/instances/python-dev/html/style.css (original) +++ tracker/instances/python-dev/html/style.css Sat Aug 13 09:50:30 2011 @@ -513,3 +513,16 @@ .calendar_display .today { background-color: #afafaf; } + +.ui-autocomplete-category { + font-weight: bold; + padding: 0 .2em; + line-height: 1.2; +} + +.ui-autocomplete { + font-size: 75% !important; + max-height: 25em; + max-width: 20em; + overflow: auto; +} Added: tracker/instances/python-dev/html/user.devs.html ============================================================================== --- (empty file) +++ tracker/instances/python-dev/html/user.devs.html Sat Aug 13 09:50:30 2011 @@ -0,0 +1,4 @@ + + [["username1","Real Name1"],["username2", "Real Name2"],...] 
+ Added: tracker/instances/python-dev/html/user.experts.html ============================================================================== --- (empty file) +++ tracker/instances/python-dev/html/user.experts.html Sat Aug 13 09:50:30 2011 @@ -0,0 +1,5 @@ + +{"Platform":{"platname":"name1,name2",...}, + "Module":{"modname":"name1,name2",...}, + ...} + From python-checkins at python.org Sat Aug 13 11:48:44 2011 From: python-checkins at python.org (georg.brandl) Date: Sat, 13 Aug 2011 11:48:44 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogRml4ICMxMTUxMzog?= =?utf8?q?wrong_exception_handling_for_the_case_that_GzipFile_itself_raise?= =?utf8?q?s?= Message-ID: http://hg.python.org/cpython/rev/843cd43206b4 changeset: 71855:843cd43206b4 branch: 3.2 parent: 71853:c13abed5d764 user: Georg Brandl date: Sat Aug 13 11:48:12 2011 +0200 summary: Fix #11513: wrong exception handling for the case that GzipFile itself raises an IOError. files: Lib/tarfile.py | 6 ++++-- Lib/test/test_tarfile.py | 8 ++++++++ Misc/NEWS | 3 +++ 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/Lib/tarfile.py b/Lib/tarfile.py --- a/Lib/tarfile.py +++ b/Lib/tarfile.py @@ -1804,11 +1804,13 @@ fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj) t = cls.taropen(name, mode, fileobj, **kwargs) except IOError: - if not extfileobj: + if not extfileobj and fileobj is not None: fileobj.close() + if fileobj is None: + raise raise ReadError("not a gzip file") except: - if not extfileobj: + if not extfileobj and fileobj is not None: fileobj.close() raise t._extfileobj = extfileobj diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py --- a/Lib/test/test_tarfile.py +++ b/Lib/test/test_tarfile.py @@ -1682,6 +1682,14 @@ class GzipMiscReadTest(MiscReadTest): tarname = gzipname mode = "r:gz" + + def test_non_existent_targz_file(self): + # Test for issue11513: prevent non-existent gzipped tarfiles raising + # multiple exceptions. 
+ with self.assertRaisesRegex(IOError, "xxx") as ex: + tarfile.open("xxx", self.mode) + self.assertEqual(ex.exception.errno, errno.ENOENT) + class GzipUstarReadTest(UstarReadTest): tarname = gzipname mode = "r:gz" diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -44,6 +44,9 @@ Library ------- +- Issue #11513: Fix exception handling ``tarfile.TarFile.gzopen()`` when + the file cannot be opened. + - Issue #12687: Fix a possible buffering bug when unpickling text mode (protocol 0, mostly) pickles. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 13 11:48:45 2011 From: python-checkins at python.org (georg.brandl) Date: Sat, 13 Aug 2011 11:48:45 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_with_3=2E2=2E?= Message-ID: http://hg.python.org/cpython/rev/cc8b8fd41567 changeset: 71856:cc8b8fd41567 parent: 71854:6ff43efb8d55 parent: 71855:843cd43206b4 user: Georg Brandl date: Sat Aug 13 11:48:40 2011 +0200 summary: Merge with 3.2. 
files: Lib/tarfile.py | 6 ++++-- Lib/test/test_tarfile.py | 8 ++++++++ Misc/NEWS | 3 +++ 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/Lib/tarfile.py b/Lib/tarfile.py --- a/Lib/tarfile.py +++ b/Lib/tarfile.py @@ -1802,11 +1802,13 @@ fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj) t = cls.taropen(name, mode, fileobj, **kwargs) except IOError: - if not extfileobj: + if not extfileobj and fileobj is not None: fileobj.close() + if fileobj is None: + raise raise ReadError("not a gzip file") except: - if not extfileobj: + if not extfileobj and fileobj is not None: fileobj.close() raise t._extfileobj = extfileobj diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py --- a/Lib/test/test_tarfile.py +++ b/Lib/test/test_tarfile.py @@ -1682,6 +1682,14 @@ class GzipMiscReadTest(MiscReadTest): tarname = gzipname mode = "r:gz" + + def test_non_existent_targz_file(self): + # Test for issue11513: prevent non-existent gzipped tarfiles raising + # multiple exceptions. + with self.assertRaisesRegex(IOError, "xxx") as ex: + tarfile.open("xxx", self.mode) + self.assertEqual(ex.exception.errno, errno.ENOENT) + class GzipUstarReadTest(UstarReadTest): tarname = gzipname mode = "r:gz" diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -254,6 +254,9 @@ Library ------- +- Issue #11513: Fix exception handling ``tarfile.TarFile.gzopen()`` when + the file cannot be opened. + - Issue #12687: Fix a possible buffering bug when unpickling text mode (protocol 0, mostly) pickles. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 13 15:28:41 2011 From: python-checkins at python.org (nadeem.vawda) Date: Sat, 13 Aug 2011 15:28:41 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2312646=3A_Add_an_?= =?utf8?q?=27eof=27_attribute_to_zlib=2EDecompress=2E?= Message-ID: http://hg.python.org/cpython/rev/bb6c2d5c811d changeset: 71857:bb6c2d5c811d user: Nadeem Vawda date: Sat Aug 13 15:22:40 2011 +0200 summary: Issue #12646: Add an 'eof' attribute to zlib.Decompress. This will make it easier to detect truncated input streams. Also, make zlib's error messages more consistent. files: Doc/library/zlib.rst | 20 ++++++++++++-------- Lib/test/test_zlib.py | 20 ++++++++++++++++++++ Misc/NEWS | 3 +++ Modules/zlibmodule.c | 19 +++++++++++++------ 4 files changed, 48 insertions(+), 14 deletions(-) diff --git a/Doc/library/zlib.rst b/Doc/library/zlib.rst --- a/Doc/library/zlib.rst +++ b/Doc/library/zlib.rst @@ -152,7 +152,7 @@ compress a set of data that share a common initial prefix. -Decompression objects support the following methods, and two attributes: +Decompression objects support the following methods and attributes: .. attribute:: Decompress.unused_data @@ -162,13 +162,6 @@ available. If the whole bytestring turned out to contain compressed data, this is ``b""``, an empty bytes object. - The only way to determine where a bytestring of compressed data ends is by actually - decompressing it. This means that when compressed data is contained part of a - larger file, you can only find the end of it by reading data and feeding it - followed by some non-empty bytestring into a decompression object's - :meth:`decompress` method until the :attr:`unused_data` attribute is no longer - empty. - .. attribute:: Decompress.unconsumed_tail @@ -179,6 +172,17 @@ :meth:`decompress` method call in order to get correct output. +.. 
attribute:: Decompress.eof + + A boolean indicating whether the end of the compressed data stream has been + reached. + + This makes it possible to distinguish between a properly-formed compressed + stream, and an incomplete or truncated one. + + .. versionadded:: 3.3 + + .. method:: Decompress.decompress(data[, max_length]) Decompress *data*, returning a bytes object containing the uncompressed data diff --git a/Lib/test/test_zlib.py b/Lib/test/test_zlib.py --- a/Lib/test/test_zlib.py +++ b/Lib/test/test_zlib.py @@ -447,6 +447,26 @@ y += dco.flush() self.assertEqual(y, b'foo') + def test_decompress_eof(self): + x = b'x\x9cK\xcb\xcf\x07\x00\x02\x82\x01E' # 'foo' + dco = zlib.decompressobj() + self.assertFalse(dco.eof) + dco.decompress(x[:-5]) + self.assertFalse(dco.eof) + dco.decompress(x[-5:]) + self.assertTrue(dco.eof) + dco.flush() + self.assertTrue(dco.eof) + + def test_decompress_eof_incomplete_stream(self): + x = b'x\x9cK\xcb\xcf\x07\x00\x02\x82\x01E' # 'foo' + dco = zlib.decompressobj() + self.assertFalse(dco.eof) + dco.decompress(x[:-5]) + self.assertFalse(dco.eof) + dco.flush() + self.assertFalse(dco.eof) + if hasattr(zlib.compressobj(), "copy"): def test_compresscopy(self): # Test copying a compression object diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -254,6 +254,9 @@ Library ------- +- Issue #12646: Add an 'eof' attribute to zlib.Decompress, to make it easier to + detect truncated input streams. + - Issue #11513: Fix exception handling ``tarfile.TarFile.gzopen()`` when the file cannot be opened. 
diff --git a/Modules/zlibmodule.c b/Modules/zlibmodule.c --- a/Modules/zlibmodule.c +++ b/Modules/zlibmodule.c @@ -43,6 +43,7 @@ z_stream zst; PyObject *unused_data; PyObject *unconsumed_tail; + char eof; int is_initialised; #ifdef WITH_THREAD PyThread_type_lock lock; @@ -89,6 +90,7 @@ self = PyObject_New(compobject, type); if (self == NULL) return NULL; + self->eof = 0; self->is_initialised = 0; self->unused_data = PyBytes_FromStringAndSize("", 0); if (self->unused_data == NULL) { @@ -291,7 +293,7 @@ err = inflateEnd(&zst); if (err != Z_OK) { - zlib_error(zst, err, "while finishing data decompression"); + zlib_error(zst, err, "while finishing decompression"); goto error; } @@ -476,7 +478,7 @@ */ if (err != Z_OK && err != Z_BUF_ERROR) { - zlib_error(self->zst, err, "while compressing"); + zlib_error(self->zst, err, "while compressing data"); Py_DECREF(RetVal); RetVal = NULL; goto error; @@ -611,12 +613,13 @@ Py_DECREF(RetVal); goto error; } + self->eof = 1; /* We will only get Z_BUF_ERROR if the output buffer was full but there wasn't more output when we tried again, so it is not an error condition. 
*/ } else if (err != Z_OK && err != Z_BUF_ERROR) { - zlib_error(self->zst, err, "while decompressing"); + zlib_error(self->zst, err, "while decompressing data"); Py_DECREF(RetVal); RetVal = NULL; goto error; @@ -697,7 +700,7 @@ if (err == Z_STREAM_END && flushmode == Z_FINISH) { err = deflateEnd(&(self->zst)); if (err != Z_OK) { - zlib_error(self->zst, err, "from deflateEnd()"); + zlib_error(self->zst, err, "while finishing compression"); Py_DECREF(RetVal); RetVal = NULL; goto error; @@ -765,6 +768,7 @@ Py_XDECREF(retval->unconsumed_tail); retval->unused_data = self->unused_data; retval->unconsumed_tail = self->unconsumed_tail; + retval->eof = self->eof; /* Mark it as being initialized */ retval->is_initialised = 1; @@ -816,6 +820,7 @@ Py_XDECREF(retval->unconsumed_tail); retval->unused_data = self->unused_data; retval->unconsumed_tail = self->unconsumed_tail; + retval->eof = self->eof; /* Mark it as being initialized */ retval->is_initialised = 1; @@ -885,10 +890,11 @@ various data structures. 
Note we should only get Z_STREAM_END when flushmode is Z_FINISH */ if (err == Z_STREAM_END) { + self->eof = 1; + self->is_initialised = 0; err = inflateEnd(&(self->zst)); - self->is_initialised = 0; if (err != Z_OK) { - zlib_error(self->zst, err, "from inflateEnd()"); + zlib_error(self->zst, err, "while finishing decompression"); Py_DECREF(retval); retval = NULL; goto error; @@ -936,6 +942,7 @@ static PyMemberDef Decomp_members[] = { {"unused_data", T_OBJECT, COMP_OFF(unused_data), READONLY}, {"unconsumed_tail", T_OBJECT, COMP_OFF(unconsumed_tail), READONLY}, + {"eof", T_BOOL, COMP_OFF(eof), READONLY}, {NULL}, }; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 13 15:45:45 2011 From: python-checkins at python.org (nadeem.vawda) Date: Sat, 13 Aug 2011 15:45:45 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_incorrect_comment_in_zl?= =?utf8?q?ib=2EDecompress=2Eflush=28=29=2E?= Message-ID: http://hg.python.org/cpython/rev/65d61ed991d9 changeset: 71858:65d61ed991d9 user: Nadeem Vawda date: Sat Aug 13 15:42:50 2011 +0200 summary: Fix incorrect comment in zlib.Decompress.flush(). Reported by Oleg Oshmyan in issue #12646. files: Modules/zlibmodule.c | 4 +--- 1 files changed, 1 insertions(+), 3 deletions(-) diff --git a/Modules/zlibmodule.c b/Modules/zlibmodule.c --- a/Modules/zlibmodule.c +++ b/Modules/zlibmodule.c @@ -886,9 +886,7 @@ Py_END_ALLOW_THREADS } - /* If flushmode is Z_FINISH, we also have to call deflateEnd() to free - various data structures. Note we should only get Z_STREAM_END when - flushmode is Z_FINISH */ + /* If at end of stream, clean up any memory allocated by zlib. 
*/ if (err == Z_STREAM_END) { self->eof = 1; self->is_initialised = 0; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 13 15:45:46 2011 From: python-checkins at python.org (nadeem.vawda) Date: Sat, 13 Aug 2011 15:45:46 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2312669=3A_Fix_test?= =?utf8?q?=5Fcurses_so_that_it_can_run_on_the_buildbots=2E?= Message-ID: http://hg.python.org/cpython/rev/4358909ee221 changeset: 71859:4358909ee221 user: Nadeem Vawda date: Sat Aug 13 15:43:49 2011 +0200 summary: Issue #12669: Fix test_curses so that it can run on the buildbots. files: Lib/test/test_curses.py | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_curses.py b/Lib/test/test_curses.py --- a/Lib/test/test_curses.py +++ b/Lib/test/test_curses.py @@ -276,11 +276,11 @@ curses.resetty() def test_main(): - if not sys.stdout.isatty(): - raise unittest.SkipTest("sys.stdout is not a tty") + if not sys.__stdout__.isatty(): + raise unittest.SkipTest("sys.__stdout__ is not a tty") # testing setupterm() inside initscr/endwin # causes terminal breakage - curses.setupterm(fd=sys.stdout.fileno()) + curses.setupterm(fd=sys.__stdout__.fileno()) try: stdscr = curses.initscr() main(stdscr) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 13 20:19:40 2011 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 13 Aug 2011 20:19:40 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2312744=3A_Fix_ineff?= =?utf8?q?icient_representation_of_integers?= Message-ID: http://hg.python.org/cpython/rev/8e824e09924a changeset: 71860:8e824e09924a user: Antoine Pitrou date: Sat Aug 13 20:15:19 2011 +0200 summary: Issue #12744: Fix inefficient representation of integers between 2**31 and 2**63 on systems with a 64-bit C "long". 
files: Lib/test/pickletester.py | 10 ++++++++++ Misc/NEWS | 3 +++ Modules/_pickle.c | 2 +- 3 files changed, 14 insertions(+), 1 deletions(-) diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py --- a/Lib/test/pickletester.py +++ b/Lib/test/pickletester.py @@ -1118,6 +1118,16 @@ empty = self.loads(b'\x80\x03U\x00q\x00.', encoding='koi8-r') self.assertEqual(empty, '') + def test_int_pickling_efficiency(self): + # Test compacity of int representation (see issue #12744) + for proto in protocols: + sizes = [len(self.dumps(2**n, proto)) for n in range(70)] + # the size function is monotonous + self.assertEqual(sorted(sizes), sizes) + if proto >= 2: + self.assertLessEqual(sizes[-1], 14) + + # Test classes for reduce_ex class REX_one(object): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -254,6 +254,9 @@ Library ------- +- Issue #12744: Fix inefficient representation of integers between 2**31 and + 2**63 on systems with a 64-bit C "long". + - Issue #12646: Add an 'eof' attribute to zlib.Decompress, to make it easier to detect truncated input streams. 
diff --git a/Modules/_pickle.c b/Modules/_pickle.c --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -1540,7 +1540,7 @@ /* out of range for int pickling */ PyErr_Clear(); } - else + else if (val <= 0x7fffffffL && val >= -0x80000000L) return save_int(self, val); if (self->proto >= 2) { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 13 20:42:35 2011 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 13 Aug 2011 20:42:35 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Followup_to_8e824e09924a=3A?= =?utf8?q?_fix_regression_on_32-bit_builds?= Message-ID: http://hg.python.org/cpython/rev/72aaaff280d1 changeset: 71861:72aaaff280d1 user: Antoine Pitrou date: Sat Aug 13 20:40:32 2011 +0200 summary: Followup to 8e824e09924a: fix regression on 32-bit builds files: Modules/_pickle.c | 7 +++++-- 1 files changed, 5 insertions(+), 2 deletions(-) diff --git a/Modules/_pickle.c b/Modules/_pickle.c --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -1540,8 +1540,11 @@ /* out of range for int pickling */ PyErr_Clear(); } - else if (val <= 0x7fffffffL && val >= -0x80000000L) - return save_int(self, val); + else +#if SIZEOF_LONG > 4 + if (val <= 0x7fffffffL && val >= -0x80000000L) +#endif + return save_int(self, val); if (self->proto >= 2) { /* Linear-time pickling. 
*/ -- Repository URL: http://hg.python.org/cpython From tjreedy at udel.edu Sat Aug 13 23:17:40 2011 From: tjreedy at udel.edu (Terry Reedy) Date: Sat, 13 Aug 2011 17:17:40 -0400 Subject: [Python-checkins] cpython: Issue #12744: Fix inefficient representation of integers In-Reply-To: References: Message-ID: <4E46E9F4.8030605@udel.edu> On 8/13/2011 2:19 PM, antoine.pitrou wrote: > + # the size function is monotonous /monotonous/monotonic/ ;-) Terry From python-checkins at python.org Sun Aug 14 01:53:55 2011 From: python-checkins at python.org (antoine.pitrou) Date: Sun, 14 Aug 2011 01:53:55 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Monotonic=2C_not_monotonous?= Message-ID: http://hg.python.org/cpython/rev/0273d0734593 changeset: 71862:0273d0734593 user: Antoine Pitrou date: Sun Aug 14 01:51:52 2011 +0200 summary: Monotonic, not monotonous files: Lib/test/pickletester.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py --- a/Lib/test/pickletester.py +++ b/Lib/test/pickletester.py @@ -1122,7 +1122,7 @@ # Test compacity of int representation (see issue #12744) for proto in protocols: sizes = [len(self.dumps(2**n, proto)) for n in range(70)] - # the size function is monotonous + # the size function is monotonic self.assertEqual(sorted(sizes), sizes) if proto >= 2: self.assertLessEqual(sizes[-1], 14) -- Repository URL: http://hg.python.org/cpython From ncoghlan at gmail.com Sun Aug 14 03:44:53 2011 From: ncoghlan at gmail.com (Nick Coghlan) Date: Sun, 14 Aug 2011 11:44:53 +1000 Subject: [Python-checkins] cpython: Monotonic, not monotonous In-Reply-To: References: Message-ID: On Sun, Aug 14, 2011 at 9:53 AM, antoine.pitrou wrote: > http://hg.python.org/cpython/rev/0273d0734593 > changeset: ? 71862:0273d0734593 > user: ? ? ? ?Antoine Pitrou > date: ? ? ? 
?Sun Aug 14 01:51:52 2011 +0200 > summary: > ?Monotonic, not monotonous > > files: > ?Lib/test/pickletester.py | ?2 +- > ?1 files changed, 1 insertions(+), 1 deletions(-) I dunno, I reckon systematically testing pickles could get pretty monotonous, too ;) Cheers, Nick. -- Nick Coghlan?? |?? ncoghlan at gmail.com?? |?? Brisbane, Australia From solipsis at pitrou.net Sun Aug 14 05:22:45 2011 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Sun, 14 Aug 2011 05:22:45 +0200 Subject: [Python-checkins] Daily reference leaks (0273d0734593): sum=0 Message-ID: results for 0273d0734593 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogsFU0Uv', '-x'] From python-checkins at python.org Sun Aug 14 07:30:04 2011 From: python-checkins at python.org (ezio.melotti) Date: Sun, 14 Aug 2011 07:30:04 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogIzEyNzI1OiBmaXgg?= =?utf8?q?working=2E_Patch_by_Ben_Hayden=2E?= Message-ID: http://hg.python.org/cpython/rev/dfe6f0a603d2 changeset: 71863:dfe6f0a603d2 branch: 2.7 parent: 71845:70ace8ffa4ac user: Ezio Melotti date: Sun Aug 14 08:27:36 2011 +0300 summary: #12725: fix working. Patch by Ben Hayden. files: Doc/ACKS.txt | 1 + Doc/library/socket.rst | 6 +++--- Modules/socketmodule.c | 6 +++--- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/Doc/ACKS.txt b/Doc/ACKS.txt --- a/Doc/ACKS.txt +++ b/Doc/ACKS.txt @@ -77,6 +77,7 @@ * Travis B. Hartwell * Tim Hatch * Janko Hauser + * Ben Hayden * Thomas Heller * Bernhard Herzog * Magnus L. Hetland diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -488,7 +488,7 @@ .. function:: getdefaulttimeout() - Return the default timeout in floating seconds for new socket objects. A value + Return the default timeout in seconds (float) for new socket objects. 
A value of ``None`` indicates that new socket objects have no timeout. When the socket module is first imported, the default is ``None``. @@ -497,7 +497,7 @@ .. function:: setdefaulttimeout(timeout) - Set the default timeout in floating seconds for new socket objects. A value of + Set the default timeout in seconds (float) for new socket objects. A value of ``None`` indicates that new socket objects have no timeout. When the socket module is first imported, the default is ``None``. @@ -766,7 +766,7 @@ .. method:: socket.gettimeout() - Return the timeout in floating seconds associated with socket operations, or + Return the timeout in seconds (float) associated with socket operations, or ``None`` if no timeout is set. This reflects the last call to :meth:`setblocking` or :meth:`settimeout`. diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -1784,7 +1784,7 @@ PyDoc_STRVAR(gettimeout_doc, "gettimeout() -> timeout\n\ \n\ -Returns the timeout in floating seconds associated with socket \n\ +Returns the timeout in seconds (float) associated with socket \n\ operations. 
A timeout of None indicates that timeouts on socket \n\ operations are disabled."); @@ -4252,7 +4252,7 @@ PyDoc_STRVAR(getdefaulttimeout_doc, "getdefaulttimeout() -> timeout\n\ \n\ -Returns the default timeout in floating seconds for new socket objects.\n\ +Returns the default timeout in seconds (float) for new socket objects.\n\ A value of None indicates that new socket objects have no timeout.\n\ When the socket module is first imported, the default is None."); @@ -4282,7 +4282,7 @@ PyDoc_STRVAR(setdefaulttimeout_doc, "setdefaulttimeout(timeout)\n\ \n\ -Set the default timeout in floating seconds for new socket objects.\n\ +Set the default timeout in seconds (float) for new socket objects.\n\ A value of None indicates that new socket objects have no timeout.\n\ When the socket module is first imported, the default is None."); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 14 07:30:05 2011 From: python-checkins at python.org (ezio.melotti) Date: Sun, 14 Aug 2011 07:30:05 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogIzEyNzI1OiBmaXgg?= =?utf8?q?working=2E_Patch_by_Ben_Hayden=2E?= Message-ID: http://hg.python.org/cpython/rev/ab3432a81c26 changeset: 71864:ab3432a81c26 branch: 3.2 parent: 71855:843cd43206b4 user: Ezio Melotti date: Sun Aug 14 08:28:57 2011 +0300 summary: #12725: fix working. Patch by Ben Hayden. files: Doc/ACKS.txt | 1 + Doc/library/socket.rst | 6 +++--- Modules/socketmodule.c | 6 +++--- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/Doc/ACKS.txt b/Doc/ACKS.txt --- a/Doc/ACKS.txt +++ b/Doc/ACKS.txt @@ -79,6 +79,7 @@ * Travis B. Hartwell * Tim Hatch * Janko Hauser + * Ben Hayden * Thomas Heller * Bernhard Herzog * Magnus L. Hetland diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -513,14 +513,14 @@ .. function:: getdefaulttimeout() - Return the default timeout in floating seconds for new socket objects. 
A value + Return the default timeout in seconds (float) for new socket objects. A value of ``None`` indicates that new socket objects have no timeout. When the socket module is first imported, the default is ``None``. .. function:: setdefaulttimeout(timeout) - Set the default timeout in floating seconds for new socket objects. When + Set the default timeout in seconds (float) for new socket objects. When the socket module is first imported, the default is ``None``. See :meth:`~socket.settimeout` for possible values and their respective meanings. @@ -632,7 +632,7 @@ .. method:: socket.gettimeout() - Return the timeout in floating seconds associated with socket operations, + Return the timeout in seconds (float) associated with socket operations, or ``None`` if no timeout is set. This reflects the last call to :meth:`setblocking` or :meth:`settimeout`. diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -1808,7 +1808,7 @@ PyDoc_STRVAR(gettimeout_doc, "gettimeout() -> timeout\n\ \n\ -Returns the timeout in floating seconds associated with socket \n\ +Returns the timeout in seconds (float) associated with socket \n\ operations. 
A timeout of None indicates that timeouts on socket \n\ operations are disabled."); @@ -4201,7 +4201,7 @@ PyDoc_STRVAR(getdefaulttimeout_doc, "getdefaulttimeout() -> timeout\n\ \n\ -Returns the default timeout in floating seconds for new socket objects.\n\ +Returns the default timeout in seconds (float) for new socket objects.\n\ A value of None indicates that new socket objects have no timeout.\n\ When the socket module is first imported, the default is None."); @@ -4231,7 +4231,7 @@ PyDoc_STRVAR(setdefaulttimeout_doc, "setdefaulttimeout(timeout)\n\ \n\ -Set the default timeout in floating seconds for new socket objects.\n\ +Set the default timeout in seconds (float) for new socket objects.\n\ A value of None indicates that new socket objects have no timeout.\n\ When the socket module is first imported, the default is None."); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 14 07:30:06 2011 From: python-checkins at python.org (ezio.melotti) Date: Sun, 14 Aug 2011 07:30:06 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_=2312725=3A_merge_with_3=2E2=2E?= Message-ID: http://hg.python.org/cpython/rev/49e9e34da512 changeset: 71865:49e9e34da512 parent: 71862:0273d0734593 parent: 71864:ab3432a81c26 user: Ezio Melotti date: Sun Aug 14 08:29:49 2011 +0300 summary: #12725: merge with 3.2. files: Doc/ACKS.txt | 1 + Doc/library/socket.rst | 6 +++--- Modules/socketmodule.c | 6 +++--- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/Doc/ACKS.txt b/Doc/ACKS.txt --- a/Doc/ACKS.txt +++ b/Doc/ACKS.txt @@ -79,6 +79,7 @@ * Travis B. Hartwell * Tim Hatch * Janko Hauser + * Ben Hayden * Thomas Heller * Bernhard Herzog * Magnus L. Hetland diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -513,14 +513,14 @@ .. function:: getdefaulttimeout() - Return the default timeout in floating seconds for new socket objects. 
A value + Return the default timeout in seconds (float) for new socket objects. A value of ``None`` indicates that new socket objects have no timeout. When the socket module is first imported, the default is ``None``. .. function:: setdefaulttimeout(timeout) - Set the default timeout in floating seconds for new socket objects. When + Set the default timeout in seconds (float) for new socket objects. When the socket module is first imported, the default is ``None``. See :meth:`~socket.settimeout` for possible values and their respective meanings. @@ -675,7 +675,7 @@ .. method:: socket.gettimeout() - Return the timeout in floating seconds associated with socket operations, + Return the timeout in seconds (float) associated with socket operations, or ``None`` if no timeout is set. This reflects the last call to :meth:`setblocking` or :meth:`settimeout`. diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -1822,7 +1822,7 @@ PyDoc_STRVAR(gettimeout_doc, "gettimeout() -> timeout\n\ \n\ -Returns the timeout in floating seconds associated with socket \n\ +Returns the timeout in seconds (float) associated with socket \n\ operations. 
A timeout of None indicates that timeouts on socket \n\ operations are disabled."); @@ -4247,7 +4247,7 @@ PyDoc_STRVAR(getdefaulttimeout_doc, "getdefaulttimeout() -> timeout\n\ \n\ -Returns the default timeout in floating seconds for new socket objects.\n\ +Returns the default timeout in seconds (float) for new socket objects.\n\ A value of None indicates that new socket objects have no timeout.\n\ When the socket module is first imported, the default is None."); @@ -4277,7 +4277,7 @@ PyDoc_STRVAR(setdefaulttimeout_doc, "setdefaulttimeout(timeout)\n\ \n\ -Set the default timeout in floating seconds for new socket objects.\n\ +Set the default timeout in seconds (float) for new socket objects.\n\ A value of None indicates that new socket objects have no timeout.\n\ When the socket module is first imported, the default is None."); -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Mon Aug 15 05:26:21 2011 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Mon, 15 Aug 2011 05:26:21 +0200 Subject: [Python-checkins] Daily reference leaks (49e9e34da512): sum=0 Message-ID: results for 49e9e34da512 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogCcPDoN', '-x'] From python-checkins at python.org Mon Aug 15 08:22:44 2011 From: python-checkins at python.org (ezio.melotti) Date: Mon, 15 Aug 2011 08:22:44 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogIzEyMjY2OiBGaXgg?= =?utf8?q?str=2Ecapitalize=28=29_to_correctly_uppercase/lowercase_titlecas?= =?utf8?q?ed_and?= Message-ID: http://hg.python.org/cpython/rev/c34772013c53 changeset: 71866:c34772013c53 branch: 3.2 parent: 71864:ab3432a81c26 user: Ezio Melotti date: Mon Aug 15 09:09:57 2011 +0300 summary: #12266: Fix str.capitalize() to correctly uppercase/lowercase titlecased and cased non-letter characters. 
files: Lib/test/string_tests.py | 17 +++++++++++++++++ Misc/NEWS | 12 ++++++++++++ Objects/unicodeobject.c | 4 ++-- 3 files changed, 31 insertions(+), 2 deletions(-) diff --git a/Lib/test/string_tests.py b/Lib/test/string_tests.py --- a/Lib/test/string_tests.py +++ b/Lib/test/string_tests.py @@ -641,6 +641,23 @@ self.checkequal('Aaaa', 'aaaa', 'capitalize') self.checkequal('Aaaa', 'AaAa', 'capitalize') + # check that titlecased chars are lowered correctly + # \u1ffc is the titlecased char + self.checkequal('\u1ffc\u1ff3\u1ff3\u1ff3', + '\u1ff3\u1ff3\u1ffc\u1ffc', 'capitalize') + # check with cased non-letter chars + self.checkequal('\u24c5\u24e8\u24e3\u24d7\u24de\u24dd', + '\u24c5\u24ce\u24c9\u24bd\u24c4\u24c3', 'capitalize') + self.checkequal('\u24c5\u24e8\u24e3\u24d7\u24de\u24dd', + '\u24df\u24e8\u24e3\u24d7\u24de\u24dd', 'capitalize') + self.checkequal('\u2160\u2171\u2172', + '\u2160\u2161\u2162', 'capitalize') + self.checkequal('\u2160\u2171\u2172', + '\u2170\u2171\u2172', 'capitalize') + # check with Ll chars with no upper - nothing changes here + self.checkequal('\u019b\u1d00\u1d86\u0221\u1fb7', + '\u019b\u1d00\u1d86\u0221\u1fb7', 'capitalize') + self.checkraises(TypeError, 'hello', 'capitalize', 42) def test_lower(self): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -2,6 +2,18 @@ Python News +++++++++++ +What's New in Python 3.2.3? +=========================== + +*Release date: XX-XXX-2011* + +Core and Builtins +----------------- + +- Issue #12266: Fix str.capitalize() to correctly uppercase/lowercase + titlecased and cased non-letter characters. + + What's New in Python 3.2.2? 
=========================== diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -6658,13 +6658,13 @@ if (len == 0) return 0; - if (Py_UNICODE_ISLOWER(*s)) { + if (!Py_UNICODE_ISUPPER(*s)) { *s = Py_UNICODE_TOUPPER(*s); status = 1; } s++; while (--len > 0) { - if (Py_UNICODE_ISUPPER(*s)) { + if (!Py_UNICODE_ISLOWER(*s)) { *s = Py_UNICODE_TOLOWER(*s); status = 1; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 15 08:22:45 2011 From: python-checkins at python.org (ezio.melotti) Date: Mon, 15 Aug 2011 08:22:45 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogIzEyMjY2OiBGaXgg?= =?utf8?q?str=2Ecapitalize=28=29_to_correctly_uppercase/lowercase_titlecas?= =?utf8?q?ed_and?= Message-ID: http://hg.python.org/cpython/rev/eab17979a586 changeset: 71867:eab17979a586 branch: 2.7 parent: 71863:dfe6f0a603d2 user: Ezio Melotti date: Mon Aug 15 09:22:24 2011 +0300 summary: #12266: Fix str.capitalize() to correctly uppercase/lowercase titlecased and cased non-letter characters. 
files: Lib/test/string_tests.py | 17 +++++++++++++++++ Misc/NEWS | 3 +++ Objects/unicodeobject.c | 4 ++-- 3 files changed, 22 insertions(+), 2 deletions(-) diff --git a/Lib/test/string_tests.py b/Lib/test/string_tests.py --- a/Lib/test/string_tests.py +++ b/Lib/test/string_tests.py @@ -96,6 +96,23 @@ self.checkequal('Aaaa', 'aaaa', 'capitalize') self.checkequal('Aaaa', 'AaAa', 'capitalize') + # check that titlecased chars are lowered correctly + # \u1ffc is the titlecased char + self.checkequal(u'\u1ffc\u1ff3\u1ff3\u1ff3', + u'\u1ff3\u1ff3\u1ffc\u1ffc', 'capitalize') + # check with cased non-letter chars + self.checkequal(u'\u24c5\u24e8\u24e3\u24d7\u24de\u24dd', + u'\u24c5\u24ce\u24c9\u24bd\u24c4\u24c3', 'capitalize') + self.checkequal(u'\u24c5\u24e8\u24e3\u24d7\u24de\u24dd', + u'\u24df\u24e8\u24e3\u24d7\u24de\u24dd', 'capitalize') + self.checkequal(u'\u2160\u2171\u2172', + u'\u2160\u2161\u2162', 'capitalize') + self.checkequal(u'\u2160\u2171\u2172', + u'\u2170\u2171\u2172', 'capitalize') + # check with Ll chars with no upper - nothing changes here + self.checkequal(u'\u019b\u1d00\u1d86\u0221\u1fb7', + u'\u019b\u1d00\u1d86\u0221\u1fb7', 'capitalize') + self.checkraises(TypeError, 'hello', 'capitalize', 42) def test_count(self): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -9,6 +9,9 @@ Core and Builtins ----------------- +- Issue #12266: Fix str.capitalize() to correctly uppercase/lowercase + titlecased and cased non-letter characters. + - Issues #12610 and #12609: Verify that user generated AST has correct string and identifier types before compiling. 
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -5485,13 +5485,13 @@ if (len == 0) return 0; - if (Py_UNICODE_ISLOWER(*s)) { + if (!Py_UNICODE_ISUPPER(*s)) { *s = Py_UNICODE_TOUPPER(*s); status = 1; } s++; while (--len > 0) { - if (Py_UNICODE_ISUPPER(*s)) { + if (!Py_UNICODE_ISLOWER(*s)) { *s = Py_UNICODE_TOLOWER(*s); status = 1; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 15 08:26:42 2011 From: python-checkins at python.org (ezio.melotti) Date: Mon, 15 Aug 2011 08:26:42 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_=2312266=3A_merge_with_3=2E2=2E?= Message-ID: http://hg.python.org/cpython/rev/1ea72da11724 changeset: 71868:1ea72da11724 parent: 71865:49e9e34da512 parent: 71866:c34772013c53 user: Ezio Melotti date: Mon Aug 15 09:26:28 2011 +0300 summary: #12266: merge with 3.2. files: Lib/test/string_tests.py | 17 +++++++++++++++++ Misc/NEWS | 3 +++ Objects/unicodeobject.c | 4 ++-- 3 files changed, 22 insertions(+), 2 deletions(-) diff --git a/Lib/test/string_tests.py b/Lib/test/string_tests.py --- a/Lib/test/string_tests.py +++ b/Lib/test/string_tests.py @@ -641,6 +641,23 @@ self.checkequal('Aaaa', 'aaaa', 'capitalize') self.checkequal('Aaaa', 'AaAa', 'capitalize') + # check that titlecased chars are lowered correctly + # \u1ffc is the titlecased char + self.checkequal('\u1ffc\u1ff3\u1ff3\u1ff3', + '\u1ff3\u1ff3\u1ffc\u1ffc', 'capitalize') + # check with cased non-letter chars + self.checkequal('\u24c5\u24e8\u24e3\u24d7\u24de\u24dd', + '\u24c5\u24ce\u24c9\u24bd\u24c4\u24c3', 'capitalize') + self.checkequal('\u24c5\u24e8\u24e3\u24d7\u24de\u24dd', + '\u24df\u24e8\u24e3\u24d7\u24de\u24dd', 'capitalize') + self.checkequal('\u2160\u2171\u2172', + '\u2160\u2161\u2162', 'capitalize') + self.checkequal('\u2160\u2171\u2172', + '\u2170\u2171\u2172', 'capitalize') + # check with Ll chars with no upper - 
nothing changes here + self.checkequal('\u019b\u1d00\u1d86\u0221\u1fb7', + '\u019b\u1d00\u1d86\u0221\u1fb7', 'capitalize') + self.checkraises(TypeError, 'hello', 'capitalize', 42) def test_lower(self): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Core and Builtins ----------------- +- Issue #12266: Fix str.capitalize() to correctly uppercase/lowercase + titlecased and cased non-letter characters. + - Issue #12732: In narrow unicode builds, allow Unicode identifiers which fall outside the BMP. diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -6733,13 +6733,13 @@ if (len == 0) return 0; - if (Py_UNICODE_ISLOWER(*s)) { + if (!Py_UNICODE_ISUPPER(*s)) { *s = Py_UNICODE_TOUPPER(*s); status = 1; } s++; while (--len > 0) { - if (Py_UNICODE_ISUPPER(*s)) { + if (!Py_UNICODE_ISLOWER(*s)) { *s = Py_UNICODE_TOLOWER(*s); status = 1; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 15 09:04:58 2011 From: python-checkins at python.org (ezio.melotti) Date: Mon, 15 Aug 2011 09:04:58 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogIzEyMjY2OiBtb3Zl?= =?utf8?q?_the_tests_in_test=5Funicode=2E?= Message-ID: http://hg.python.org/cpython/rev/d3816fa1bcdf changeset: 71869:d3816fa1bcdf branch: 2.7 parent: 71867:eab17979a586 user: Ezio Melotti date: Mon Aug 15 10:04:28 2011 +0300 summary: #12266: move the tests in test_unicode. 
files: Lib/test/string_tests.py | 17 ----------------- Lib/test/test_unicode.py | 19 +++++++++++++++++++ 2 files changed, 19 insertions(+), 17 deletions(-) diff --git a/Lib/test/string_tests.py b/Lib/test/string_tests.py --- a/Lib/test/string_tests.py +++ b/Lib/test/string_tests.py @@ -96,23 +96,6 @@ self.checkequal('Aaaa', 'aaaa', 'capitalize') self.checkequal('Aaaa', 'AaAa', 'capitalize') - # check that titlecased chars are lowered correctly - # \u1ffc is the titlecased char - self.checkequal(u'\u1ffc\u1ff3\u1ff3\u1ff3', - u'\u1ff3\u1ff3\u1ffc\u1ffc', 'capitalize') - # check with cased non-letter chars - self.checkequal(u'\u24c5\u24e8\u24e3\u24d7\u24de\u24dd', - u'\u24c5\u24ce\u24c9\u24bd\u24c4\u24c3', 'capitalize') - self.checkequal(u'\u24c5\u24e8\u24e3\u24d7\u24de\u24dd', - u'\u24df\u24e8\u24e3\u24d7\u24de\u24dd', 'capitalize') - self.checkequal(u'\u2160\u2171\u2172', - u'\u2160\u2161\u2162', 'capitalize') - self.checkequal(u'\u2160\u2171\u2172', - u'\u2170\u2171\u2172', 'capitalize') - # check with Ll chars with no upper - nothing changes here - self.checkequal(u'\u019b\u1d00\u1d86\u0221\u1fb7', - u'\u019b\u1d00\u1d86\u0221\u1fb7', 'capitalize') - self.checkraises(TypeError, 'hello', 'capitalize', 42) def test_count(self): diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py --- a/Lib/test/test_unicode.py +++ b/Lib/test/test_unicode.py @@ -269,6 +269,25 @@ # Surrogates on both sides, no fixup required self.assertTrue(u'\ud800\udc02' < u'\ud84d\udc56') + def test_capitalize(self): + string_tests.CommonTest.test_capitalize(self) + # check that titlecased chars are lowered correctly + # \u1ffc is the titlecased char + self.checkequal(u'\u1ffc\u1ff3\u1ff3\u1ff3', + u'\u1ff3\u1ff3\u1ffc\u1ffc', 'capitalize') + # check with cased non-letter chars + self.checkequal(u'\u24c5\u24e8\u24e3\u24d7\u24de\u24dd', + u'\u24c5\u24ce\u24c9\u24bd\u24c4\u24c3', 'capitalize') + self.checkequal(u'\u24c5\u24e8\u24e3\u24d7\u24de\u24dd', + 
u'\u24df\u24e8\u24e3\u24d7\u24de\u24dd', 'capitalize') + self.checkequal(u'\u2160\u2171\u2172', + u'\u2160\u2161\u2162', 'capitalize') + self.checkequal(u'\u2160\u2171\u2172', + u'\u2170\u2171\u2172', 'capitalize') + # check with Ll chars with no upper - nothing changes here + self.checkequal(u'\u019b\u1d00\u1d86\u0221\u1fb7', + u'\u019b\u1d00\u1d86\u0221\u1fb7', 'capitalize') + def test_islower(self): string_tests.MixinStrUnicodeUserStringTest.test_islower(self) self.checkequalnofix(False, u'\u1FFc', 'islower') -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 15 10:07:52 2011 From: python-checkins at python.org (ezio.melotti) Date: Mon, 15 Aug 2011 10:07:52 +0200 (CEST) Subject: [Python-checkins] r88882 - in tracker/instances/python-dev/extensions: local_replace.py test/local_replace_data.txt Message-ID: <3RcqF45RFNzMV7@mail.python.org> Author: ezio.melotti Date: Mon Aug 15 10:07:52 2011 New Revision: 88882 Log: Add linkification for the devguide. Modified: tracker/instances/python-dev/extensions/local_replace.py tracker/instances/python-dev/extensions/test/local_replace_data.txt Modified: tracker/instances/python-dev/extensions/local_replace.py ============================================================================== --- tracker/instances/python-dev/extensions/local_replace.py (original) +++ tracker/instances/python-dev/extensions/local_replace.py Mon Aug 15 10:07:52 2011 @@ -100,6 +100,10 @@ # PEP 8, PEP8, PEP 0008, ... (re.compile(r'PEP\s*(\d{1,4})\b', re.I), make_pep_link), + + # devguide + (re.compile(r'(?\1'), ] Modified: tracker/instances/python-dev/extensions/test/local_replace_data.txt ============================================================================== --- tracker/instances/python-dev/extensions/test/local_replace_data.txt (original) +++ tracker/instances/python-dev/extensions/test/local_replace_data.txt Mon Aug 15 10:07:52 2011 @@ -190,3 +190,19 @@ see PEP 15000 write a PEP! write a PEP! 
+## +## +see the devguide. +see the devguide. +see http://docs.python.org/devguide/. +see http://docs.python.org/devguide/. +see devguide/triaging. +see devguide/triaging. +see devguide/triaging.html. +see devguide/triaging.html. +see http://docs.python.org/devguide/committing.html#using-several-working-copies +see http://docs.python.org/devguide/committing.html#using-several-working-copies +see devguide/committing.html#using-several-working-copies +see devguide/committing.html#using-several-working-copies +see devguide/committing#using-several-working-copies +see devguide/committing#using-several-working-copies From python-checkins at python.org Mon Aug 15 10:16:41 2011 From: python-checkins at python.org (ezio.melotti) Date: Mon, 15 Aug 2011 10:16:41 +0200 Subject: [Python-checkins] =?utf8?q?devguide=3A_=2312711=3A_document_the_t?= =?utf8?q?racker_components=2E?= Message-ID: http://hg.python.org/devguide/rev/c9dd231b0940 changeset: 441:c9dd231b0940 user: Ezio Melotti date: Mon Aug 15 11:16:29 2011 +0300 summary: #12711: document the tracker components. files: triaging.rst | 80 ++++++++++++++++++++++++++++++++++++++++ 1 files changed, 80 insertions(+), 0 deletions(-) diff --git a/triaging.rst b/triaging.rst --- a/triaging.rst +++ b/triaging.rst @@ -52,6 +52,58 @@ i.e. the issue tracker may automatically fill in the `Assigned To`_ field after you press ``Submit changes``. +The following component(s) should be selected if the issue applies to: + +2to3 (2.x to 3.0 conversion tool) + The 2to3 conversion tool in `Lib/lib2to3`_. +Benchmarks + The benchmarks in the benchmarks_ repo. +Build + The build process. +ctypes + The ctypes package in `Lib/ctypes`_. +Demos and Tools + The files in Tools_ and `Tools/demo`_. +Devguide + The `Developer's guide`_. +Distutils + The distutils package in `Lib/distutils`_. +Distutils2 + The packaging module in `Lib/packaging`_. +Documentation + The documentation in Doc_ (used to build the HTML doc at http://docs.python.org/). 
+Extension Modules + C modules in Modules_. +IDLE + The `Lib/idlelib`_ package. +Installation + The installation process. +Interpreter Core + The interpreter core, the built-in objects in `Objects`_, the `Python`_, + `Grammar`_ and `Parser`_ dirs. +IO + The I/O system, `Lib/io.py`_ and `Modules/_io`_. +Library (Lib) + Python modules in Lib_. +Macintosh + The Mac OS X operating system. +Regular Expressions + The `Lib/re.py`_ and `Modules/_sre.c`_ modules. +Tests + The unittest and doctest frameworks in `Lib/unittest`_ and + `Lib/doctest.py`_. + + The CPython tests in `Lib/test`_, the test runner in `Lib/test/regrtest.py`_ + and the `Lib/test/support.py`_ module. +Tkinter + The `Lib/tkinter`_ package. +Unicode + Unicode, codecs, str vs bytes, `Objects/unicodeobject.c`_. +Windows + The Windows operating system. +XML + The `Lib/xml`_ package. + Versions '''''''' The known versions of Python that the issue affects and should be fixed for. @@ -220,6 +272,34 @@ tracker itself. +.. _Doc: http://hg.python.org/cpython/file/default/Doc/ +.. _Grammar: http://hg.python.org/cpython/file/default/Grammar/ +.. _Lib: http://hg.python.org/cpython/file/default/Lib/ +.. _Lib/lib2to3: http://hg.python.org/cpython/file/default/Lib/lib2to3/ +.. _Lib/ctypes: http://hg.python.org/cpython/file/default/Lib/ctypes/ +.. _Lib/distutils: http://hg.python.org/cpython/file/default/Lib/distutils/ +.. _Lib/doctest.py: http://hg.python.org/cpython/file/default/Lib/doctest.py +.. _Lib/idlelib: http://hg.python.org/cpython/file/default/Lib/idlelib/ +.. _Lib/io.py: http://hg.python.org/cpython/file/default/Lib/io.py +.. _Lib/packaging: http://hg.python.org/cpython/file/default/Lib/packaging/ +.. _Lib/re.py: http://hg.python.org/cpython/file/default/Lib/re.py +.. _Lib/test: http://hg.python.org/cpython/file/default/Lib/test/ +.. _Lib/test/regrtest.py: http://hg.python.org/cpython/file/default/Lib/test/regrtest.py +.. _Lib/test/support.py: http://hg.python.org/cpython/file/default/Lib/test/support.py +.. 
_Lib/tkinter: http://hg.python.org/cpython/file/default/Lib/tkinter/ +.. _Lib/unittest: http://hg.python.org/cpython/file/default/Lib/unittest/ +.. _Lib/xml: http://hg.python.org/cpython/file/default/Lib/xml/ +.. _Modules: http://hg.python.org/cpython/file/default/Modules/ +.. _Modules/_io: http://hg.python.org/cpython/file/default/Modules/_io/ +.. _Modules/_sre.c: http://hg.python.org/cpython/file/default/Modules/_sre.c +.. _Objects: http://hg.python.org/cpython/file/default/Objects/ +.. _Objects/unicodeobject.c: http://hg.python.org/cpython/file/default/Objects/unicodeobject.c +.. _Parser: http://hg.python.org/cpython/file/default/Parser/ +.. _Python: http://hg.python.org/cpython/file/default/Python/ +.. _Tools: http://hg.python.org/cpython/file/default/Tools/ +.. _Tools/demo: http://hg.python.org/cpython/file/default/Tools/demo/ +.. _benchmarks: http://hg.python.org/benchmarks/ +.. _Developer's guide: http://hg.python.org/devguide/ .. _GSoC: http://code.google.com/soc/ .. _issue tracker: http://bugs.python.org .. _language moratorium: http://www.python.org/dev/peps/pep-3003/ -- Repository URL: http://hg.python.org/devguide From python-checkins at python.org Mon Aug 15 10:33:58 2011 From: python-checkins at python.org (ezio.melotti) Date: Mon, 15 Aug 2011 10:33:58 +0200 Subject: [Python-checkins] =?utf8?q?devguide=3A_Document_the_devguide_link?= =?utf8?q?ing_introduced_in_r88882=2E?= Message-ID: http://hg.python.org/devguide/rev/8c69000e8da2 changeset: 442:8c69000e8da2 user: Ezio Melotti date: Mon Aug 15 11:22:11 2011 +0300 summary: Document the devguide linking introduced in r88882. files: triaging.rst | 3 +++ 1 files changed, 3 insertions(+), 0 deletions(-) diff --git a/triaging.rst b/triaging.rst --- a/triaging.rst +++ b/triaging.rst @@ -263,6 +263,9 @@ possibly linking to the line number specified after the ``:``. * ``PEP `` and ``PEP`` link to the :abbr:`PEP (Python Enhancement Proposal)` ````. 
+* ``devguide`` (lowercase), ``devguide/triaging``, and + ``devguide/triaging#generating-special-links-in-a-comment`` generate links to + the Devguide, this page, and this section respectively. Reporting Issues About the Tracker -- Repository URL: http://hg.python.org/devguide From python-checkins at python.org Mon Aug 15 10:34:00 2011 From: python-checkins at python.org (ezio.melotti) Date: Mon, 15 Aug 2011 10:34:00 +0200 Subject: [Python-checkins] =?utf8?q?devguide=3A_Document_the_=5B+=5D_butto?= =?utf8?q?n_and_the_autocomplete_of_the_nosy_list=2E?= Message-ID: http://hg.python.org/devguide/rev/0a128fc45fc5 changeset: 443:0a128fc45fc5 user: Ezio Melotti date: Mon Aug 15 11:32:31 2011 +0300 summary: Document the [+] button and the autocomplete of the nosy list. files: triaging.rst | 9 +++++++++ 1 files changed, 9 insertions(+), 0 deletions(-) diff --git a/triaging.rst b/triaging.rst --- a/triaging.rst +++ b/triaging.rst @@ -165,6 +165,15 @@ attention. Use the :ref:`experts` to know who wants to be added to the nosy list for issues targeting specific areas. +If you have Javascript enabled and permission to edit the nosy list, you can +use the ``[+]`` button to add yourself to the nosy (remember to click on +"Submit Changes" afterwards). Note that you are added to the nosy +automatically when you submit a message. +The nosy list also has an autocomplete that lets you search from the list of +developers and from the :ref:`experts`. The search is case-insensitive and +works for real names, modules, interest areas, etc., and only adds the +username(s) to the nosy once an entry is selected. + Assigned To ''''''''''' Who is expected to take the next step in resolving the issue. 
It is acceptable -- Repository URL: http://hg.python.org/devguide From python-checkins at python.org Mon Aug 15 13:29:01 2011 From: python-checkins at python.org (ezio.melotti) Date: Mon, 15 Aug 2011 13:29:01 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogIzEyMjA0OiBkb2N1?= =?utf8?q?ment_that_str=2Eupper=28=29=2Eisupper=28=29_might_be_False_and_a?= =?utf8?q?dd_a_note_about?= Message-ID: http://hg.python.org/cpython/rev/16edc5cf4a79 changeset: 71870:16edc5cf4a79 branch: 3.2 parent: 71866:c34772013c53 user: Ezio Melotti date: Mon Aug 15 14:27:19 2011 +0300 summary: #12204: document that str.upper().isupper() might be False and add a note about cased characters. files: Doc/library/stdtypes.rst | 41 +++++++++++++++------------ 1 files changed, 22 insertions(+), 19 deletions(-) diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -54,7 +54,7 @@ * instances of user-defined classes, if the class defines a :meth:`__bool__` or :meth:`__len__` method, when that method returns the integer zero or - :class:`bool` value ``False``. [#]_ + :class:`bool` value ``False``. [1]_ .. index:: single: true @@ -261,7 +261,7 @@ operands of different numeric types, the operand with the "narrower" type is widened to that of the other, where integer is narrower than floating point, which is narrower than complex. Comparisons between numbers of mixed type use -the same rule. [#]_ The constructors :func:`int`, :func:`float`, and +the same rule. [2]_ The constructors :func:`int`, :func:`float`, and :func:`complex` can be used to produce numbers of a specific type. All numeric types (except complex) support the following operations, sorted by @@ -852,7 +852,7 @@ Most sequence types support the following operations. The ``in`` and ``not in`` operations have the same priorities as the comparison operations. The ``+`` and ``*`` operations have the same priority as the corresponding numeric operations. 
-[#]_ Additional methods are provided for :ref:`typesseq-mutable`. +[3]_ Additional methods are provided for :ref:`typesseq-mutable`. This table lists the sequence operations sorted in ascending priority (operations in the same box have the same priority). In the table, *s* and *t* @@ -1137,10 +1137,8 @@ .. method:: str.islower() - Return true if all cased characters in the string are lowercase and there is at - least one cased character, false otherwise. Cased characters are those with - general category property being one of "Lu", "Ll", or "Lt" and lowercase characters - are those with general category property "Ll". + Return true if all cased characters [4]_ in the string are lowercase and + there is at least one cased character, false otherwise. .. method:: str.isnumeric() @@ -1180,10 +1178,8 @@ .. method:: str.isupper() - Return true if all cased characters in the string are uppercase and there is at - least one cased character, false otherwise. Cased characters are those with - general category property being one of "Lu", "Ll", or "Lt" and uppercase characters - are those with general category property "Lu". + Return true if all cased characters [4]_ in the string are uppercase and + there is at least one cased character, false otherwise. .. method:: str.join(iterable) @@ -1203,7 +1199,8 @@ .. method:: str.lower() - Return a copy of the string converted to lowercase. + Return a copy of the string with all the cased characters [4]_ converted to + lowercase. .. method:: str.lstrip([chars]) @@ -1404,7 +1401,10 @@ .. method:: str.upper() - Return a copy of the string converted to uppercase. + Return a copy of the string with all the cased characters [4]_ converted to + uppercase. Note that ``str.upper().isupper()`` might be ``False`` if ``s`` + contains uncased characters or if the Unicode category of the resulting + character(s) is not "Lu" (Letter, uppercase), but e.g. "Lt" (Letter, titlecase). .. 
method:: str.zfill(width) @@ -1444,7 +1444,7 @@ The effect is similar to the using :c:func:`sprintf` in the C language. If *format* requires a single argument, *values* may be a single non-tuple -object. [#]_ Otherwise, *values* must be a tuple with exactly the number of +object. [5]_ Otherwise, *values* must be a tuple with exactly the number of items specified by the format string, or a single mapping object (for example, a dictionary). @@ -2808,13 +2808,16 @@ .. rubric:: Footnotes -.. [#] Additional information on these special methods may be found in the Python +.. [1] Additional information on these special methods may be found in the Python Reference Manual (:ref:`customization`). -.. [#] As a consequence, the list ``[1, 2]`` is considered equal to ``[1.0, 2.0]``, and +.. [2] As a consequence, the list ``[1, 2]`` is considered equal to ``[1.0, 2.0]``, and similarly for tuples. -.. [#] They must have since the parser can't tell the type of the operands. - -.. [#] To format only a tuple you should therefore provide a singleton tuple whose only +.. [3] They must have since the parser can't tell the type of the operands. + +.. [4] Cased characters are those with general category property being one of + "Lu" (Letter, uppercase), "Ll" (Letter, lowercase), or "Lt" (Letter, titlecase). + +.. [5] To format only a tuple you should therefore provide a singleton tuple whose only element is the tuple to be formatted. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 15 13:29:02 2011 From: python-checkins at python.org (ezio.melotti) Date: Mon, 15 Aug 2011 13:29:02 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogIzEyMjA0OiBkb2N1?= =?utf8?q?ment_that_str=2Eupper=28=29=2Eisupper=28=29_might_be_False_and_a?= =?utf8?q?dd_a_note_about?= Message-ID: http://hg.python.org/cpython/rev/fb49394f75ed changeset: 71871:fb49394f75ed branch: 2.7 parent: 71869:d3816fa1bcdf user: Ezio Melotti date: Mon Aug 15 14:24:15 2011 +0300 summary: #12204: document that str.upper().isupper() might be False and add a note about cased characters. files: Doc/library/stdtypes.rst | 33 ++++++++++++++++----------- 1 files changed, 20 insertions(+), 13 deletions(-) diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -63,7 +63,7 @@ * instances of user-defined classes, if the class defines a :meth:`__nonzero__` or :meth:`__len__` method, when that method returns the integer zero or - :class:`bool` value ``False``. [#]_ + :class:`bool` value ``False``. [1]_ .. index:: single: true @@ -277,7 +277,7 @@ operands of different numeric types, the operand with the "narrower" type is widened to that of the other, where plain integer is narrower than long integer is narrower than floating point is narrower than complex. Comparisons between -numbers of mixed type use the same rule. [#]_ The constructors :func:`int`, +numbers of mixed type use the same rule. [2]_ The constructors :func:`int`, :func:`long`, :func:`float`, and :func:`complex` can be used to produce numbers of a specific type. @@ -709,7 +709,7 @@ Most sequence types support the following operations. The ``in`` and ``not in`` operations have the same priorities as the comparison operations. The ``+`` and ``*`` operations have the same priority as the corresponding numeric operations. 
-[#]_ Additional methods are provided for :ref:`typesseq-mutable`. +[3]_ Additional methods are provided for :ref:`typesseq-mutable`. This table lists the sequence operations sorted in ascending priority (operations in the same box have the same priority). In the table, *s* and *t* @@ -1007,7 +1007,7 @@ .. method:: str.islower() - Return true if all cased characters in the string are lowercase and there is at + Return true if all cased characters [4]_ in the string are lowercase and there is at least one cased character, false otherwise. For 8-bit strings, this method is locale-dependent. @@ -1032,7 +1032,7 @@ .. method:: str.isupper() - Return true if all cased characters in the string are uppercase and there is at + Return true if all cased characters [4]_ in the string are uppercase and there is at least one cased character, false otherwise. For 8-bit strings, this method is locale-dependent. @@ -1057,7 +1057,8 @@ .. method:: str.lower() - Return a copy of the string converted to lowercase. + Return a copy of the string with all the cased characters [4]_ converted to + lowercase. For 8-bit strings, this method is locale-dependent. @@ -1280,7 +1281,10 @@ .. method:: str.upper() - Return a copy of the string converted to uppercase. + Return a copy of the string with all the cased characters [4]_ converted to + uppercase. Note that ``str.upper().isupper()`` might be ``False`` if ``s`` + contains uncased characters or if the Unicode category of the resulting + character(s) is not "Lu" (Letter, uppercase), but e.g. "Lt" (Letter, titlecase). For 8-bit strings, this method is locale-dependent. @@ -1336,7 +1340,7 @@ the result will also be a Unicode object. If *format* requires a single argument, *values* may be a single non-tuple -object. [#]_ Otherwise, *values* must be a tuple with exactly the number of +object. 
[5]_ Otherwise, *values* must be a tuple with exactly the number of items specified by the format string, or a single mapping object (for example, a dictionary). @@ -3044,15 +3048,18 @@ .. rubric:: Footnotes -.. [#] Additional information on these special methods may be found in the Python +.. [1] Additional information on these special methods may be found in the Python Reference Manual (:ref:`customization`). -.. [#] As a consequence, the list ``[1, 2]`` is considered equal to ``[1.0, 2.0]``, and +.. [2] As a consequence, the list ``[1, 2]`` is considered equal to ``[1.0, 2.0]``, and similarly for tuples. -.. [#] They must have since the parser can't tell the type of the operands. - -.. [#] To format only a tuple you should therefore provide a singleton tuple whose only +.. [3] They must have since the parser can't tell the type of the operands. + +.. [4] Cased characters are those with general category property being one of + "Lu" (Letter, uppercase), "Ll" (Letter, lowercase), or "Lt" (Letter, titlecase). + +.. [5] To format only a tuple you should therefore provide a singleton tuple whose only element is the tuple to be formatted. .. [#] The advantage of leaving the newline on is that returning an empty string is -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 15 13:29:03 2011 From: python-checkins at python.org (ezio.melotti) Date: Mon, 15 Aug 2011 13:29:03 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_=2312204=3A_merge_with_3=2E2=2E?= Message-ID: http://hg.python.org/cpython/rev/c821e3a54930 changeset: 71872:c821e3a54930 parent: 71868:1ea72da11724 parent: 71870:16edc5cf4a79 user: Ezio Melotti date: Mon Aug 15 14:28:46 2011 +0300 summary: #12204: merge with 3.2. 
files: Doc/library/stdtypes.rst | 41 +++++++++++++++------------ 1 files changed, 22 insertions(+), 19 deletions(-) diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -54,7 +54,7 @@ * instances of user-defined classes, if the class defines a :meth:`__bool__` or :meth:`__len__` method, when that method returns the integer zero or - :class:`bool` value ``False``. [#]_ + :class:`bool` value ``False``. [1]_ .. index:: single: true @@ -261,7 +261,7 @@ operands of different numeric types, the operand with the "narrower" type is widened to that of the other, where integer is narrower than floating point, which is narrower than complex. Comparisons between numbers of mixed type use -the same rule. [#]_ The constructors :func:`int`, :func:`float`, and +the same rule. [2]_ The constructors :func:`int`, :func:`float`, and :func:`complex` can be used to produce numbers of a specific type. All numeric types (except complex) support the following operations, sorted by @@ -852,7 +852,7 @@ Most sequence types support the following operations. The ``in`` and ``not in`` operations have the same priorities as the comparison operations. The ``+`` and ``*`` operations have the same priority as the corresponding numeric operations. -[#]_ Additional methods are provided for :ref:`typesseq-mutable`. +[3]_ Additional methods are provided for :ref:`typesseq-mutable`. This table lists the sequence operations sorted in ascending priority (operations in the same box have the same priority). In the table, *s* and *t* @@ -1137,10 +1137,8 @@ .. method:: str.islower() - Return true if all cased characters in the string are lowercase and there is at - least one cased character, false otherwise. Cased characters are those with - general category property being one of "Lu", "Ll", or "Lt" and lowercase characters - are those with general category property "Ll". 
+ Return true if all cased characters [4]_ in the string are lowercase and + there is at least one cased character, false otherwise. .. method:: str.isnumeric() @@ -1180,10 +1178,8 @@ .. method:: str.isupper() - Return true if all cased characters in the string are uppercase and there is at - least one cased character, false otherwise. Cased characters are those with - general category property being one of "Lu", "Ll", or "Lt" and uppercase characters - are those with general category property "Lu". + Return true if all cased characters [4]_ in the string are uppercase and + there is at least one cased character, false otherwise. .. method:: str.join(iterable) @@ -1203,7 +1199,8 @@ .. method:: str.lower() - Return a copy of the string converted to lowercase. + Return a copy of the string with all the cased characters [4]_ converted to + lowercase. .. method:: str.lstrip([chars]) @@ -1404,7 +1401,10 @@ .. method:: str.upper() - Return a copy of the string converted to uppercase. + Return a copy of the string with all the cased characters [4]_ converted to + uppercase. Note that ``str.upper().isupper()`` might be ``False`` if ``s`` + contains uncased characters or if the Unicode category of the resulting + character(s) is not "Lu" (Letter, uppercase), but e.g. "Lt" (Letter, titlecase). .. method:: str.zfill(width) @@ -1444,7 +1444,7 @@ The effect is similar to the using :c:func:`sprintf` in the C language. If *format* requires a single argument, *values* may be a single non-tuple -object. [#]_ Otherwise, *values* must be a tuple with exactly the number of +object. [5]_ Otherwise, *values* must be a tuple with exactly the number of items specified by the format string, or a single mapping object (for example, a dictionary). @@ -2821,13 +2821,16 @@ .. rubric:: Footnotes -.. [#] Additional information on these special methods may be found in the Python +.. 
[1] Additional information on these special methods may be found in the Python Reference Manual (:ref:`customization`). -.. [#] As a consequence, the list ``[1, 2]`` is considered equal to ``[1.0, 2.0]``, and +.. [2] As a consequence, the list ``[1, 2]`` is considered equal to ``[1.0, 2.0]``, and similarly for tuples. -.. [#] They must have since the parser can't tell the type of the operands. - -.. [#] To format only a tuple you should therefore provide a singleton tuple whose only +.. [3] They must have since the parser can't tell the type of the operands. + +.. [4] Cased characters are those with general category property being one of + "Lu" (Letter, uppercase), "Ll" (Letter, lowercase), or "Lt" (Letter, titlecase). + +.. [5] To format only a tuple you should therefore provide a singleton tuple whose only element is the tuple to be formatted. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 16 01:51:00 2011 From: python-checkins at python.org (barry.warsaw) Date: Tue, 16 Aug 2011 01:51:00 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_The_simplest_po?= =?utf8?q?ssible_fix_for_the_regression_in_bug_12752_by_encoding_unicodes?= Message-ID: http://hg.python.org/cpython/rev/0d64fe6c737f changeset: 71873:0d64fe6c737f branch: 2.7 parent: 71871:fb49394f75ed user: Barry Warsaw date: Mon Aug 15 19:17:12 2011 -0400 summary: The simplest possible fix for the regression in bug 12752 by encoding unicodes to 8-bit strings. files: Lib/locale.py | 2 ++ Lib/test/test_locale.py | 5 +++++ 2 files changed, 7 insertions(+), 0 deletions(-) diff --git a/Lib/locale.py b/Lib/locale.py --- a/Lib/locale.py +++ b/Lib/locale.py @@ -355,6 +355,8 @@ """ # Normalize the locale name and extract the encoding + if isinstance(localename, unicode): + localename = localename.encode('ascii') fullname = localename.translate(_ascii_lower_map) if ':' in fullname: # ':' is sometimes used as encoding delimiter. 
diff --git a/Lib/test/test_locale.py b/Lib/test/test_locale.py --- a/Lib/test/test_locale.py +++ b/Lib/test/test_locale.py @@ -412,6 +412,11 @@ locale.setlocale(locale.LC_CTYPE, loc) self.assertEqual(loc, locale.getlocale()) + def test_normalize_issue12752(self): + # Issue #1813 caused a regression where locale.normalize() would no + # longer accept unicode strings. + self.assertEqual(locale.normalize(u'en_US'), 'en_US.ISO8859-1') + def test_main(): tests = [ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 16 01:51:01 2011 From: python-checkins at python.org (barry.warsaw) Date: Tue, 16 Aug 2011 01:51:01 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_NEWS_entry=2E?= Message-ID: http://hg.python.org/cpython/rev/baea9f5f973c changeset: 71874:baea9f5f973c branch: 2.7 user: Barry Warsaw date: Mon Aug 15 19:50:35 2011 -0400 summary: NEWS entry. files: Misc/NEWS | 3 +++ 1 files changed, 3 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -40,6 +40,9 @@ Library ------- +- Issue #12752: Fix regression which prevented locale.normalize() from + accepting unicode strings. + - Issue #12683: urlparse updated to include svn as schemes that uses relative paths. (svn from 1.5 onwards support relative path). 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 16 03:30:00 2011 From: python-checkins at python.org (antoine.pitrou) Date: Tue, 16 Aug 2011 03:30:00 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Add_Alexandre=27s_suggestions?= Message-ID: http://hg.python.org/peps/rev/667bb9cd35e8 changeset: 3927:667bb9cd35e8 user: Antoine Pitrou date: Tue Aug 16 03:27:53 2011 +0200 summary: Add Alexandre's suggestions files: pep-3154.txt | 25 +++++++++++++++++++++++++ 1 files changed, 25 insertions(+), 0 deletions(-) diff --git a/pep-3154.txt b/pep-3154.txt --- a/pep-3154.txt +++ b/pep-3154.txt @@ -60,6 +60,26 @@ remove the current impossibility of pickling self-referential sets [2]_. +Calling __new__ with keyword arguments +-------------------------------------- + +Currently, classes whose __new__ mandates the use of keyword-only arguments +can not be pickled (or, rather, unpickled) [3]_. Both a new special method +(``__getnewargs_ex__`` ?) and a new opcode (NEWOBJEX ?) are needed. + +Serializing "pseudo-global" objects +----------------------------------- + +Objects which are not module-global, but should be treated in a similar +fashion -- such as methods [4]_ or nested classes -- cannot currently be +pickled (or, rather, unpickled) because the pickle protocol does not +correctly specify how to retrieve them. One solution would be through the +adjunction of a ``__namespace__`` (or ``__qualname__``) to all class and +function objects, specifying the full "path" by which they can be retrieved. +For globals, this would generally be ``"{}.{}".format(obj.__module__, obj.__name__)``. +Then a new opcode can resolve that path and push the object on the stack, +similarly to the GLOBAL opcode. + Binary encoding for all opcodes ------------------------------- @@ -96,6 +116,11 @@ .. [2] "Cannot pickle self-referencing sets": http://bugs.python.org/issue9269 +.. 
[3] "pickle/copyreg doesn't support keyword only arguments in __new__": + http://bugs.python.org/issue4727 + +.. [4] "pickle should support methods": + http://bugs.python.org/issue9276 Copyright ========= -- Repository URL: http://hg.python.org/peps From ncoghlan at gmail.com Tue Aug 16 04:35:48 2011 From: ncoghlan at gmail.com (Nick Coghlan) Date: Tue, 16 Aug 2011 12:35:48 +1000 Subject: [Python-checkins] peps: Add Alexandre's suggestions In-Reply-To: References: Message-ID: On Tue, Aug 16, 2011 at 11:30 AM, antoine.pitrou wrote: > +Serializing "pseudo-global" objects > +----------------------------------- > + > +Objects which are not module-global, but should be treated in a similar > +fashion -- such as methods [4]_ or nested classes -- cannot currently be > +pickled (or, rather, unpickled) because the pickle protocol does not > +correctly specify how to retrieve them. ?One solution would be through the > +adjunction of a ``__namespace__`` (or ``__qualname__``) to all class and > +function objects, specifying the full "path" by which they can be retrieved. > +For globals, this would generally be ``"{}.{}".format(obj.__module__, obj.__name__)``. > +Then a new opcode can resolve that path and push the object on the stack, > +similarly to the GLOBAL opcode. > + I think this is the part that ties in with the pickle-related aspects for PEP 395 - using '__qualname__' would be one way to align a module's real name with where it should be retrieved from and where it's documentation lives (I like 'qualified name' as a term, too). Cheers, Nick. -- Nick Coghlan?? |?? ncoghlan at gmail.com?? |?? 
Brisbane, Australia From solipsis at pitrou.net Tue Aug 16 05:21:57 2011 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Tue, 16 Aug 2011 05:21:57 +0200 Subject: [Python-checkins] Daily reference leaks (c821e3a54930): sum=0 Message-ID: results for c821e3a54930 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogXqjeqV', '-x'] From python-checkins at python.org Tue Aug 16 12:14:08 2011 From: python-checkins at python.org (antoine.pitrou) Date: Tue, 16 Aug 2011 12:14:08 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Add_a_suggestion_to_make_more_?= =?utf8?q?callables_picklable_=28as_multiprocessing?= Message-ID: http://hg.python.org/peps/rev/83606a50ac6a changeset: 3928:83606a50ac6a user: Antoine Pitrou date: Tue Aug 16 12:12:02 2011 +0200 summary: Add a suggestion to make more callables picklable (as multiprocessing already does) files: pep-3154.txt | 17 ++++++++++++++--- 1 files changed, 14 insertions(+), 3 deletions(-) diff --git a/pep-3154.txt b/pep-3154.txt --- a/pep-3154.txt +++ b/pep-3154.txt @@ -67,12 +67,20 @@ can not be pickled (or, rather, unpickled) [3]_. Both a new special method (``__getnewargs_ex__`` ?) and a new opcode (NEWOBJEX ?) are needed. +Serializing more callable objects +--------------------------------- + +Currently, only module-global functions are serializable. Multiprocessing +has custom support for pickling other callables such as bound methods [4]_. +This support could be folded in the protocol, and made more efficient +through a new GETATTR opcode. 
+ Serializing "pseudo-global" objects ----------------------------------- Objects which are not module-global, but should be treated in a similar -fashion -- such as methods [4]_ or nested classes -- cannot currently be -pickled (or, rather, unpickled) because the pickle protocol does not +fashion -- such as unbound methods [5]_ or nested classes -- cannot currently +be pickled (or, rather, unpickled) because the pickle protocol does not correctly specify how to retrieve them. One solution would be through the adjunction of a ``__namespace__`` (or ``__qualname__``) to all class and function objects, specifying the full "path" by which they can be retrieved. @@ -119,7 +127,10 @@ .. [3] "pickle/copyreg doesn't support keyword only arguments in __new__": http://bugs.python.org/issue4727 -.. [4] "pickle should support methods": +.. [4] Lib/multiprocessing/forking.py: + http://hg.python.org/cpython/file/baea9f5f973c/Lib/multiprocessing/forking.py#l54 + +.. [5] "pickle should support methods": http://bugs.python.org/issue9276 Copyright -- Repository URL: http://hg.python.org/peps From ncoghlan at gmail.com Tue Aug 16 12:15:51 2011 From: ncoghlan at gmail.com (Nick Coghlan) Date: Tue, 16 Aug 2011 20:15:51 +1000 Subject: [Python-checkins] [Python-Dev] peps: Add Alexandre's suggestions In-Reply-To: <20110816112529.15fb6c69@pitrou.net> References: <20110816112529.15fb6c69@pitrou.net> Message-ID: On Tue, Aug 16, 2011 at 7:25 PM, Antoine Pitrou wrote: > On Tue, 16 Aug 2011 12:35:48 +1000 > Nick Coghlan wrote: >> On Tue, Aug 16, 2011 at 11:30 AM, antoine.pitrou >> wrote: >> > +Serializing "pseudo-global" objects >> > +----------------------------------- >> > + >> > +Objects which are not module-global, but should be treated in a similar >> > +fashion -- such as methods [4]_ or nested classes -- cannot currently be >> > +pickled (or, rather, unpickled) because the pickle protocol does not >> > +correctly specify how to retrieve them. 
?One solution would be through the >> > +adjunction of a ``__namespace__`` (or ``__qualname__``) to all class and >> > +function objects, specifying the full "path" by which they can be retrieved. >> > +For globals, this would generally be ``"{}.{}".format(obj.__module__, obj.__name__)``. >> > +Then a new opcode can resolve that path and push the object on the stack, >> > +similarly to the GLOBAL opcode. >> > + >> >> I think this is the part that ties in with the pickle-related aspects >> for PEP 395 - using '__qualname__' ?would be one way to align a >> module's real name with where it should be retrieved from and where >> it's documentation lives (I like 'qualified name' as a term, too). > > Oops, I admit I hadn't read PEP 395. > PEP 395 focuses on module aliasing, while the suggestion above focuses > on the path of objects in modules. How can we reconcile the two? Do we > want __qualname__ to be a relative "path" inside the module? > (but then __qualname__ cannot specify its own module name). I was more thinking that if pickle grew the ability to handle two different names for objects, then PEP 395 could run off the same feature without having to mess with sys.modules. Cheers, Nick. -- Nick Coghlan?? |?? ncoghlan at gmail.com?? |?? Brisbane, Australia From solipsis at pitrou.net Tue Aug 16 13:23:44 2011 From: solipsis at pitrou.net (Antoine Pitrou) Date: Tue, 16 Aug 2011 13:23:44 +0200 Subject: [Python-checkins] [Python-Dev] peps: Add Alexandre's suggestions In-Reply-To: References: <20110816112529.15fb6c69@pitrou.net> Message-ID: <20110816132344.6d64aca7@pitrou.net> On Tue, 16 Aug 2011 20:15:51 +1000 Nick Coghlan wrote: > > > > Oops, I admit I hadn't read PEP 395. > > PEP 395 focuses on module aliasing, while the suggestion above focuses > > on the path of objects in modules. How can we reconcile the two? Do we > > want __qualname__ to be a relative "path" inside the module? > > (but then __qualname__ cannot specify its own module name). 
> > I was more thinking that if pickle grew the ability to handle two > different names for objects, then PEP 395 could run off the same > feature without having to mess with sys.modules. But what happens if a module contains, say, a nested class with a __qualname__ (assigned by the interpreter) of "module_name.A.B", and the module later gets a __qualname__ (assigned by the user) of "module_alias"? Regards Antoine. From ncoghlan at gmail.com Tue Aug 16 13:37:31 2011 From: ncoghlan at gmail.com (Nick Coghlan) Date: Tue, 16 Aug 2011 21:37:31 +1000 Subject: [Python-checkins] [Python-Dev] peps: Add Alexandre's suggestions In-Reply-To: <20110816132344.6d64aca7@pitrou.net> References: <20110816112529.15fb6c69@pitrou.net> <20110816132344.6d64aca7@pitrou.net> Message-ID: On Tue, Aug 16, 2011 at 9:23 PM, Antoine Pitrou wrote: > On Tue, 16 Aug 2011 20:15:51 +1000 > Nick Coghlan wrote: >> > >> > Oops, I admit I hadn't read PEP 395. >> > PEP 395 focuses on module aliasing, while the suggestion above focuses >> > on the path of objects in modules. How can we reconcile the two? Do we >> > want __qualname__ to be a relative "path" inside the module? >> > (but then __qualname__ cannot specify its own module name). >> >> I was more thinking that if pickle grew the ability to handle two >> different names for objects, then PEP 395 could run off the same >> feature without having to mess with sys.modules. > > But what happens if a module contains, say, a nested class with a > __qualname__ (assigned by the interpreter) of "module_name.A.B", and the > module later gets a __qualname__ (assigned by the user) of > "module_alias"? Yeah, I don't think it works with PEP 395 in its current state. But then, I'm not sure 395 will work at all in its current state - definitely a work in progress, that one. However, I'll definitely keep this aspect in mind next time I update it - even if they don't use the same mechanism, they should at least be compatible proposals. Cheers, Nick. 
-- Nick Coghlan   |   ncoghlan at gmail.com   |   Brisbane, Australia From python-checkins at python.org Tue Aug 16 18:03:53 2011 From: python-checkins at python.org (ezio.melotti) Date: Tue, 16 Aug 2011 18:03:53 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_=239723=3A_refactor_regex?= =?utf8?q?=2E?= Message-ID: http://hg.python.org/cpython/rev/5d4438001069 changeset: 71875:5d4438001069 parent: 71872:c821e3a54930 user: Ezio Melotti date: Tue Aug 16 19:03:41 2011 +0300 summary: #9723: refactor regex. files: Lib/shlex.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/shlex.py b/Lib/shlex.py --- a/Lib/shlex.py +++ b/Lib/shlex.py @@ -276,7 +276,7 @@ return list(lex) -_find_unsafe = re.compile(r'[^\w@%\-\+=:,\./]', re.ASCII).search +_find_unsafe = re.compile(r'[^\w@%+=:,./-]', re.ASCII).search def quote(s): """Return a shell-escaped version of the string *s*.""" -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 16 19:14:25 2011 From: python-checkins at python.org (eric.araujo) Date: Tue, 16 Aug 2011 19:14:25 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Revert_change_t?= =?utf8?q?hat_was_not_a_syntax_fix_but_actually_a_behavior_change?= Message-ID: http://hg.python.org/cpython/rev/1295eff4dc06 changeset: 71876:1295eff4dc06 branch: 3.2 parent: 71870:16edc5cf4a79 user: Éric Araujo date: Tue Aug 16 19:05:56 2011 +0200 summary: Revert change that was not a syntax fix but actually a behavior change files: Makefile.pre.in | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Makefile.pre.in b/Makefile.pre.in --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1313,7 +1313,7 @@ -o -name .hgignore \ -o -name .bzrignore \ -o -name MANIFEST \ - -print + -o -print # Perform some verification checks on any modified files. 
patchcheck: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 16 19:14:26 2011 From: python-checkins at python.org (eric.araujo) Date: Tue, 16 Aug 2011 19:14:26 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_typo?= Message-ID: http://hg.python.org/cpython/rev/676ab1d05a26 changeset: 71877:676ab1d05a26 parent: 71872:c821e3a54930 user: Éric Araujo date: Tue Aug 16 19:09:56 2011 +0200 summary: Fix typo files: Doc/library/collections.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst --- a/Doc/library/collections.rst +++ b/Doc/library/collections.rst @@ -72,7 +72,7 @@ A user updateable list of mappings. The list is ordered from first-searched to last-searched. It is the only stored state and can - modified to change which mappings are searched. The list should + be modified to change which mappings are searched. The list should always contain at least one mapping. .. method:: new_child() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 16 19:14:27 2011 From: python-checkins at python.org (eric.araujo) Date: Tue, 16 Aug 2011 19:14:27 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Move_versionadded_directive?= =?utf8?q?_to_the_top_level=2C_as_we_do_in_other_files?= Message-ID: http://hg.python.org/cpython/rev/785c01719c85 changeset: 71878:785c01719c85 user: Éric Araujo date: Tue Aug 16 19:10:24 2011 +0200 summary: Move versionadded directive to the top level, as we do in other files files: Doc/library/collections.abc.rst | 5 +++-- 1 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Doc/library/collections.abc.rst b/Doc/library/collections.abc.rst --- a/Doc/library/collections.abc.rst +++ b/Doc/library/collections.abc.rst @@ -6,6 +6,9 @@ .. moduleauthor:: Raymond Hettinger .. sectionauthor:: Raymond Hettinger +.. 
versionadded:: 3.3 + Formerly, this module was part of the :mod:`collections` module. + .. testsetup:: * from collections import * @@ -20,8 +23,6 @@ can be used to test whether a class provides a particular interface; for example, whether it is hashable or whether it is a mapping. -.. versionchanged:: 3.3 - Formerly, this module was part of the :mod:`collections` module. .. _collections-abstract-base-classes: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 16 19:14:31 2011 From: python-checkins at python.org (eric.araujo) Date: Tue, 16 Aug 2011 19:14:31 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_Makefile_fix_from_3=2E2_=28thanks_Georg=29?= Message-ID: http://hg.python.org/cpython/rev/a57c90bc5597 changeset: 71879:a57c90bc5597 parent: 71878:785c01719c85 parent: 71876:1295eff4dc06 user: Éric Araujo date: Tue Aug 16 19:11:00 2011 +0200 summary: Merge Makefile fix from 3.2 (thanks Georg) files: Makefile.pre.in | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Makefile.pre.in b/Makefile.pre.in --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1359,7 +1359,7 @@ -o -name .hgignore \ -o -name .bzrignore \ -o -name MANIFEST \ - -print + -o -print # Perform some verification checks on any modified files. 
patchcheck: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 16 19:14:32 2011 From: python-checkins at python.org (eric.araujo) Date: Tue, 16 Aug 2011 19:14:32 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_Branch_merge?= Message-ID: http://hg.python.org/cpython/rev/dcd2480b9a76 changeset: 71880:dcd2480b9a76 parent: 71875:5d4438001069 parent: 71879:a57c90bc5597 user: Éric Araujo date: Tue Aug 16 19:13:58 2011 +0200 summary: Branch merge files: Doc/library/collections.abc.rst | 5 +++-- Doc/library/collections.rst | 2 +- Makefile.pre.in | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/Doc/library/collections.abc.rst b/Doc/library/collections.abc.rst --- a/Doc/library/collections.abc.rst +++ b/Doc/library/collections.abc.rst @@ -6,6 +6,9 @@ .. moduleauthor:: Raymond Hettinger .. sectionauthor:: Raymond Hettinger +.. versionadded:: 3.3 + Formerly, this module was part of the :mod:`collections` module. + .. testsetup:: * from collections import * @@ -20,8 +23,6 @@ can be used to test whether a class provides a particular interface; for example, whether it is hashable or whether it is a mapping. -.. versionchanged:: 3.3 - Formerly, this module was part of the :mod:`collections` module. .. 
method:: new_child() diff --git a/Makefile.pre.in b/Makefile.pre.in --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1359,7 +1359,7 @@ -o -name .hgignore \ -o -name .bzrignore \ -o -name MANIFEST \ - -print + -o -print # Perform some verification checks on any modified files. patchcheck: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 16 20:04:35 2011 From: python-checkins at python.org (antoine.pitrou) Date: Tue, 16 Aug 2011 20:04:35 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Use_-n_for_tests_under_Wind?= =?utf8?q?ows?= Message-ID: http://hg.python.org/cpython/rev/03d439f4ec5c changeset: 71881:03d439f4ec5c user: Antoine Pitrou date: Tue Aug 16 20:02:26 2011 +0200 summary: Use -n for tests under Windows files: Tools/scripts/run_tests.py | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/Tools/scripts/run_tests.py b/Tools/scripts/run_tests.py --- a/Tools/scripts/run_tests.py +++ b/Tools/scripts/run_tests.py @@ -32,6 +32,8 @@ '-r', # Randomize test order '-w', # Re-run failed tests in verbose mode ]) + if sys.platform == 'win32': + args.append('-n') # Silence alerts under Windows if not any(is_multiprocess_flag(arg) for arg in regrtest_args): args.extend(['-j', '0']) # Use all CPU cores if not any(is_resource_use_flag(arg) for arg in regrtest_args): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 16 20:10:41 2011 From: python-checkins at python.org (sandro.tosi) Date: Tue, 16 Aug 2011 20:10:41 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogIzEyNzYxOiBmaXgg?= =?utf8?q?wording_of_zlib_license_section?= Message-ID: http://hg.python.org/cpython/rev/80ac94ad381e changeset: 71882:80ac94ad381e branch: 2.7 parent: 71874:baea9f5f973c user: Sandro Tosi date: Tue Aug 16 20:02:15 2011 +0200 summary: #12761: fix wording of zlib license section files: Doc/license.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/license.rst 
b/Doc/license.rst --- a/Doc/license.rst +++ b/Doc/license.rst @@ -917,7 +917,7 @@ ---- The :mod:`zlib` extension is built using an included copy of the zlib -sources unless the zlib version found on the system is too old to be +sources if the zlib version found on the system is too old to be used for the build:: Copyright (C) 1995-2010 Jean-loup Gailly and Mark Adler -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 16 20:10:42 2011 From: python-checkins at python.org (sandro.tosi) Date: Tue, 16 Aug 2011 20:10:42 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogIzEyNzYxOiBmaXgg?= =?utf8?q?wording_of_zlib_license_section?= Message-ID: http://hg.python.org/cpython/rev/16a02530fd81 changeset: 71883:16a02530fd81 branch: 3.2 parent: 71876:1295eff4dc06 user: Sandro Tosi date: Tue Aug 16 20:03:11 2011 +0200 summary: #12761: fix wording of zlib license section files: Doc/license.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/license.rst b/Doc/license.rst --- a/Doc/license.rst +++ b/Doc/license.rst @@ -873,7 +873,7 @@ ---- The :mod:`zlib` extension is built using an included copy of the zlib -sources unless the zlib version found on the system is too old to be +sources if the zlib version found on the system is too old to be used for the build:: Copyright (C) 1995-2011 Jean-loup Gailly and Mark Adler -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 16 20:10:42 2011 From: python-checkins at python.org (sandro.tosi) Date: Tue, 16 Aug 2011 20:10:42 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_merge_with_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/e0df665b3100 changeset: 71884:e0df665b3100 parent: 71880:dcd2480b9a76 parent: 71883:16a02530fd81 user: Sandro Tosi date: Tue Aug 16 20:03:50 2011 +0200 summary: merge with 3.2 files: Doc/license.rst | 2 +- 1 files changed, 1 insertions(+), 1 
deletions(-) diff --git a/Doc/license.rst b/Doc/license.rst --- a/Doc/license.rst +++ b/Doc/license.rst @@ -875,7 +875,7 @@ ---- The :mod:`zlib` extension is built using an included copy of the zlib -sources unless the zlib version found on the system is too old to be +sources if the zlib version found on the system is too old to be used for the build:: Copyright (C) 1995-2011 Jean-loup Gailly and Mark Adler -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 16 20:10:43 2011 From: python-checkins at python.org (sandro.tosi) Date: Tue, 16 Aug 2011 20:10:43 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_merge_heads?= Message-ID: http://hg.python.org/cpython/rev/cc6199ca5c73 changeset: 71885:cc6199ca5c73 parent: 71884:e0df665b3100 parent: 71881:03d439f4ec5c user: Sandro Tosi date: Tue Aug 16 20:08:04 2011 +0200 summary: merge heads files: Tools/scripts/run_tests.py | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/Tools/scripts/run_tests.py b/Tools/scripts/run_tests.py --- a/Tools/scripts/run_tests.py +++ b/Tools/scripts/run_tests.py @@ -32,6 +32,8 @@ '-r', # Randomize test order '-w', # Re-run failed tests in verbose mode ]) + if sys.platform == 'win32': + args.append('-n') # Silence alerts under Windows if not any(is_multiprocess_flag(arg) for arg in regrtest_args): args.extend(['-j', '0']) # Use all CPU cores if not any(is_resource_use_flag(arg) for arg in regrtest_args): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 17 00:46:53 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 17 Aug 2011 00:46:53 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_some_*nixes_decided_not_to_?= =?utf8?q?call_init_process_1_=28closes_=2312763=29?= Message-ID: http://hg.python.org/cpython/rev/09f2ddd3d15a changeset: 71886:09f2ddd3d15a parent: 71872:c821e3a54930 user: Benjamin Peterson date: Tue Aug 16 
17:46:04 2011 -0500 summary: some *nixes decided not to call init process 1 (closes #12763) files: Lib/test/test_posix.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -862,7 +862,7 @@ try: init = posix.sched_getscheduler(1) except OSError as e: - if e.errno != errno.EPERM: + if e.errno != errno.EPERM and e.errno != errno.ESRCH: raise else: self.assertIn(init, possible_schedulers) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 17 00:46:54 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 17 Aug 2011 00:46:54 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_merge_heads?= Message-ID: http://hg.python.org/cpython/rev/0ad5804c4d62 changeset: 71887:0ad5804c4d62 parent: 71886:09f2ddd3d15a parent: 71885:cc6199ca5c73 user: Benjamin Peterson date: Tue Aug 16 17:46:44 2011 -0500 summary: merge heads files: Doc/library/collections.abc.rst | 5 +++-- Doc/library/collections.rst | 2 +- Doc/license.rst | 2 +- Lib/shlex.py | 2 +- Makefile.pre.in | 2 +- Tools/scripts/run_tests.py | 2 ++ 6 files changed, 9 insertions(+), 6 deletions(-) diff --git a/Doc/library/collections.abc.rst b/Doc/library/collections.abc.rst --- a/Doc/library/collections.abc.rst +++ b/Doc/library/collections.abc.rst @@ -6,6 +6,9 @@ .. moduleauthor:: Raymond Hettinger .. sectionauthor:: Raymond Hettinger +.. versionadded:: 3.3 + Formerly, this module was part of the :mod:`collections` module. + .. testsetup:: * from collections import * @@ -20,8 +23,6 @@ can be used to test whether a class provides a particular interface; for example, whether it is hashable or whether it is a mapping. -.. versionchanged:: 3.3 - Formerly, this module was part of the :mod:`collections` module. .. 
_collections-abstract-base-classes: diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst --- a/Doc/library/collections.rst +++ b/Doc/library/collections.rst @@ -72,7 +72,7 @@ A user updateable list of mappings. The list is ordered from first-searched to last-searched. It is the only stored state and can - modified to change which mappings are searched. The list should + be modified to change which mappings are searched. The list should always contain at least one mapping. .. method:: new_child() diff --git a/Doc/license.rst b/Doc/license.rst --- a/Doc/license.rst +++ b/Doc/license.rst @@ -875,7 +875,7 @@ ---- The :mod:`zlib` extension is built using an included copy of the zlib -sources unless the zlib version found on the system is too old to be +sources if the zlib version found on the system is too old to be used for the build:: Copyright (C) 1995-2011 Jean-loup Gailly and Mark Adler diff --git a/Lib/shlex.py b/Lib/shlex.py --- a/Lib/shlex.py +++ b/Lib/shlex.py @@ -276,7 +276,7 @@ return list(lex) -_find_unsafe = re.compile(r'[^\w@%\-\+=:,\./]', re.ASCII).search +_find_unsafe = re.compile(r'[^\w@%+=:,./-]', re.ASCII).search def quote(s): """Return a shell-escaped version of the string *s*.""" diff --git a/Makefile.pre.in b/Makefile.pre.in --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1359,7 +1359,7 @@ -o -name .hgignore \ -o -name .bzrignore \ -o -name MANIFEST \ - -print + -o -print # Perform some verification checks on any modified files. 
patchcheck: diff --git a/Tools/scripts/run_tests.py b/Tools/scripts/run_tests.py --- a/Tools/scripts/run_tests.py +++ b/Tools/scripts/run_tests.py @@ -32,6 +32,8 @@ '-r', # Randomize test order '-w', # Re-run failed tests in verbose mode ]) + if sys.platform == 'win32': + args.append('-n') # Silence alerts under Windows if not any(is_multiprocess_flag(arg) for arg in regrtest_args): args.extend(['-j', '0']) # Use all CPU cores if not any(is_resource_use_flag(arg) for arg in regrtest_args): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 17 01:53:37 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 17 Aug 2011 01:53:37 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_complain_when_a_class_varia?= =?utf8?q?ble_shadows_a_name_in_=5F=5Fslots=5F=5F_=28closes_=2312766=29?= Message-ID: http://hg.python.org/cpython/rev/45b63a8a76c9 changeset: 71888:45b63a8a76c9 user: Benjamin Peterson date: Tue Aug 16 18:53:26 2011 -0500 summary: complain when a class variable shadows a name in __slots__ (closes #12766) files: Lib/test/test_descr.py | 8 ++++++++ Misc/NEWS | 3 +++ Objects/typeobject.c | 6 ++++++ 3 files changed, 17 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py --- a/Lib/test/test_descr.py +++ b/Lib/test/test_descr.py @@ -4253,6 +4253,14 @@ foo = Foo() str(foo) + def test_slot_shadows_class(self): + with self.assertRaises(ValueError) as cm: + class X: + __slots__ = ["foo"] + foo = None + m = str(cm.exception) + self.assertEqual("'foo' in __slots__ conflicts with class variable", m) + class DictProxyTests(unittest.TestCase): def setUp(self): class C(object): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Core and Builtins ----------------- +- Issue #12766: Raise an ValueError when creating a class with a class variable + that conflicts with a name in __slots__. 
+ - Issue #12266: Fix str.capitalize() to correctly uppercase/lowercase titlecased and cased non-letter characters. diff --git a/Objects/typeobject.c b/Objects/typeobject.c --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -2094,6 +2094,12 @@ if (!tmp) goto bad_slots; PyList_SET_ITEM(newslots, j, tmp); + if (PyDict_GetItem(dict, tmp)) { + PyErr_Format(PyExc_ValueError, + "%R in __slots__ conflicts with class variable", + tmp); + goto bad_slots; + } j++; } assert(j == nslots - add_dict - add_weak); -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Wed Aug 17 05:22:07 2011 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Wed, 17 Aug 2011 05:22:07 +0200 Subject: [Python-checkins] Daily reference leaks (45b63a8a76c9): sum=6 Message-ID: results for 45b63a8a76c9 on branch "default" -------------------------------------------- test_descr leaked [2, 2, 2] references, sum=6 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogFAg29u', '-x'] From python-checkins at python.org Wed Aug 17 05:28:35 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 17 Aug 2011 05:28:35 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_fix_possible_re?= =?utf8?q?fleaks?= Message-ID: http://hg.python.org/cpython/rev/f0515cc7ee20 changeset: 71889:f0515cc7ee20 branch: 3.2 parent: 71883:16a02530fd81 user: Benjamin Peterson date: Tue Aug 16 22:26:48 2011 -0500 summary: fix possible refleaks files: Objects/typeobject.c | 4 +++- 1 files changed, 3 insertions(+), 1 deletions(-) diff --git a/Objects/typeobject.c b/Objects/typeobject.c --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -2093,8 +2093,10 @@ PyUnicode_CompareWithASCIIString(tmp, "__weakref__") == 0)) continue; tmp =_Py_Mangle(name, tmp); - if (!tmp) + if (!tmp) { + Py_DECREF(newslots); goto bad_slots; + } PyList_SET_ITEM(newslots, j, tmp); j++; } -- Repository URL: http://hg.python.org/cpython From 
python-checkins at python.org Wed Aug 17 05:28:36 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 17 Aug 2011 05:28:36 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_fix_possible_re?= =?utf8?q?fleaks?= Message-ID: http://hg.python.org/cpython/rev/b4ccf8e1fdba changeset: 71890:b4ccf8e1fdba branch: 2.7 parent: 71882:80ac94ad381e user: Benjamin Peterson date: Tue Aug 16 22:26:48 2011 -0500 summary: fix possible refleaks files: Objects/typeobject.c | 4 +++- 1 files changed, 3 insertions(+), 1 deletions(-) diff --git a/Objects/typeobject.c b/Objects/typeobject.c --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -2235,8 +2235,10 @@ (add_weak && strcmp(s, "__weakref__") == 0)) continue; tmp =_Py_Mangle(name, tmp); - if (!tmp) + if (!tmp) { + Py_DECREF(newslots); goto bad_slots; + } PyList_SET_ITEM(newslots, j, tmp); j++; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 17 05:28:36 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 17 Aug 2011 05:28:36 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_merge_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/4adbbe86cd92 changeset: 71891:4adbbe86cd92 parent: 71888:45b63a8a76c9 parent: 71889:f0515cc7ee20 user: Benjamin Peterson date: Tue Aug 16 22:27:42 2011 -0500 summary: merge 3.2 files: Objects/typeobject.c | 4 +++- 1 files changed, 3 insertions(+), 1 deletions(-) diff --git a/Objects/typeobject.c b/Objects/typeobject.c --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -2091,8 +2091,10 @@ PyUnicode_CompareWithASCIIString(tmp, "__weakref__") == 0)) continue; tmp =_Py_Mangle(name, tmp); - if (!tmp) + if (!tmp) { + Py_DECREF(newslots); goto bad_slots; + } PyList_SET_ITEM(newslots, j, tmp); if (PyDict_GetItem(dict, tmp)) { PyErr_Format(PyExc_ValueError, -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 17 05:28:37 2011 
From: python-checkins at python.org (benjamin.peterson) Date: Wed, 17 Aug 2011 05:28:37 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_crush_other_possible_reflea?= =?utf8?q?ks_in_this_section?= Message-ID: http://hg.python.org/cpython/rev/3d1cdbd76fbe changeset: 71892:3d1cdbd76fbe user: Benjamin Peterson date: Tue Aug 16 22:28:23 2011 -0500 summary: crush other possible refleaks in this section files: Objects/typeobject.c | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/Objects/typeobject.c b/Objects/typeobject.c --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -2100,6 +2100,7 @@ PyErr_Format(PyExc_ValueError, "%R in __slots__ conflicts with class variable", tmp); + Py_DECREF(newslots); goto bad_slots; } j++; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 17 19:04:28 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 17 Aug 2011 19:04:28 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_improve_test_name?= Message-ID: http://hg.python.org/cpython/rev/0872cc0f71dc changeset: 71893:0872cc0f71dc user: Benjamin Peterson date: Wed Aug 17 11:48:23 2011 -0500 summary: improve test name files: Lib/test/test_descr.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py --- a/Lib/test/test_descr.py +++ b/Lib/test/test_descr.py @@ -4253,7 +4253,7 @@ foo = Foo() str(foo) - def test_slot_shadows_class(self): + def test_slot_shadows_class_variable(self): with self.assertRaises(ValueError) as cm: class X: __slots__ = ["foo"] -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 17 19:04:29 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 17 Aug 2011 19:04:29 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_factor_out_common_checks_fo?= =?utf8?q?r_setting_special_type_attributes?= Message-ID: http://hg.python.org/cpython/rev/7fee7f9d2c03 changeset: 
71894:7fee7f9d2c03 user: Benjamin Peterson date: Wed Aug 17 11:54:03 2011 -0500 summary: factor out common checks for setting special type attributes files: Objects/typeobject.c | 46 +++++++++++++------------------ 1 files changed, 19 insertions(+), 27 deletions(-) diff --git a/Objects/typeobject.c b/Objects/typeobject.c --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -201,6 +201,22 @@ {0} }; +static int +check_set_special_type_attr(PyTypeObject *type, PyObject *value, const char *name) +{ + if (!(type->tp_flags & Py_TPFLAGS_HEAPTYPE)) { + PyErr_Format(PyExc_TypeError, + "can't set %s.%s", type->tp_name, name); + return 0; + } + if (!value) { + PyErr_Format(PyExc_TypeError, + "can't delete %s.%s", type->tp_name, name); + return 0; + } + return 1; +} + static PyObject * type_name(PyTypeObject *type, void *context) { @@ -229,16 +245,8 @@ char *tp_name; PyObject *tmp; - if (!(type->tp_flags & Py_TPFLAGS_HEAPTYPE)) { - PyErr_Format(PyExc_TypeError, - "can't set %s.__name__", type->tp_name); + if (!check_set_special_type_attr(type, value, "__name__")) return -1; - } - if (!value) { - PyErr_Format(PyExc_TypeError, - "can't delete %s.__name__", type->tp_name); - return -1; - } if (!PyUnicode_Check(value)) { PyErr_Format(PyExc_TypeError, "can only assign string to %s.__name__, not '%s'", @@ -301,16 +309,8 @@ static int type_set_module(PyTypeObject *type, PyObject *value, void *context) { - if (!(type->tp_flags & Py_TPFLAGS_HEAPTYPE)) { - PyErr_Format(PyExc_TypeError, - "can't set %s.__module__", type->tp_name); + if (!check_set_special_type_attr(type, value, "__module__")) return -1; - } - if (!value) { - PyErr_Format(PyExc_TypeError, - "can't delete %s.__module__", type->tp_name); - return -1; - } PyType_Modified(type); @@ -433,16 +433,8 @@ PyTypeObject *new_base, *old_base; PyObject *old_bases, *old_mro; - if (!(type->tp_flags & Py_TPFLAGS_HEAPTYPE)) { - PyErr_Format(PyExc_TypeError, - "can't set %s.__bases__", type->tp_name); + if 
(!check_set_special_type_attr(type, value, "__bases__")) return -1; - } - if (!value) { - PyErr_Format(PyExc_TypeError, - "can't delete %s.__bases__", type->tp_name); - return -1; - } if (!PyTuple_Check(value)) { PyErr_Format(PyExc_TypeError, "can only assign tuple to %s.__bases__, not %s", -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 17 19:04:30 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 17 Aug 2011 19:04:30 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_make_=5F=5Fdoc=5F=5F_mutabl?= =?utf8?q?e_on_heaptypes_=28closes_=2312773=29?= Message-ID: http://hg.python.org/cpython/rev/ed2511c23dae changeset: 71895:ed2511c23dae user: Benjamin Peterson date: Wed Aug 17 12:03:47 2011 -0500 summary: make __doc__ mutable on heaptypes (closes #12773) files: Lib/test/test_descr.py | 13 +++++++++++++ Misc/NEWS | 2 ++ Objects/typeobject.c | 11 ++++++++++- 3 files changed, 25 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py --- a/Lib/test/test_descr.py +++ b/Lib/test/test_descr.py @@ -4261,6 +4261,19 @@ m = str(cm.exception) self.assertEqual("'foo' in __slots__ conflicts with class variable", m) + def test_set_doc(self): + class X: + "elephant" + X.__doc__ = "banana" + self.assertEqual(X.__doc__, "banana") + with self.assertRaises(TypeError) as cm: + type(list).__dict__["__doc__"].__set__(list, "blah") + self.assertIn("can't set list.__doc__", str(cm.exception)) + with self.assertRaises(TypeError) as cm: + type(X).__dict__["__doc__"].__delete__(X) + self.assertIn("can't delete X.__doc__", str(cm.exception)) + self.assertEqual(X.__doc__, "banana") + class DictProxyTests(unittest.TestCase): def setUp(self): class C(object): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,8 @@ Core and Builtins ----------------- +- Issue #12773: Make __doc__ mutable on user-defined classes. 
+ - Issue #12766: Raise an ValueError when creating a class with a class variable that conflicts with a name in __slots__. diff --git a/Objects/typeobject.c b/Objects/typeobject.c --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -588,6 +588,15 @@ return result; } +static int +type_set_doc(PyTypeObject *type, PyObject *value, void *context) +{ + if (!check_set_special_type_attr(type, value, "__doc__")) + return -1; + PyType_Modified(type); + return PyDict_SetItemString(type->tp_dict, "__doc__", value); +} + static PyObject * type___instancecheck__(PyObject *type, PyObject *inst) { @@ -623,7 +632,7 @@ {"__abstractmethods__", (getter)type_abstractmethods, (setter)type_set_abstractmethods, NULL}, {"__dict__", (getter)type_dict, NULL, NULL}, - {"__doc__", (getter)type_get_doc, NULL, NULL}, + {"__doc__", (getter)type_get_doc, (setter)type_set_doc, NULL}, {0} }; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 17 19:05:34 2011 From: python-checkins at python.org (benjamin.peterson) Date: Wed, 17 Aug 2011 19:05:34 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_change_word?= Message-ID: http://hg.python.org/cpython/rev/1a49c98394df changeset: 71896:1a49c98394df user: Benjamin Peterson date: Wed Aug 17 12:05:13 2011 -0500 summary: change word files: Misc/NEWS | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -26,7 +26,7 @@ - Make type(None), type(Ellipsis), and type(NotImplemented) callable. They return the respective singleton instances. -- Forbid summing bytes in sum(). +- Forbid summing bytes with sum(). - Verify the types of AST strings and identifiers provided by the user before compiling them. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 17 20:51:59 2011 From: python-checkins at python.org (victor.stinner) Date: Wed, 17 Aug 2011 20:51:59 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2312326=3A_don=27t_t?= =?utf8?q?est_the_major_version_of_sys=2Eplatform?= Message-ID: http://hg.python.org/cpython/rev/50f1922bc1d5 changeset: 71897:50f1922bc1d5 user: Victor Stinner date: Wed Aug 17 20:49:41 2011 +0200 summary: Issue #12326: don't test the major version of sys.platform Use startswith, instead of ==, when testing sys.platform to support new platforms like Linux 3 or OpenBSD 5. files: Lib/distutils/tests/test_bdist_rpm.py | 4 +- Lib/test/regrtest.py | 74 +++++++------- Lib/test/test_fcntl.py | 9 +- Lib/test/test_logging.py | 2 +- Lib/test/test_socket.py | 2 +- Lib/test/test_tarfile.py | 2 +- Lib/test/test_tempfile.py | 2 +- setup.py | 5 +- 8 files changed, 48 insertions(+), 52 deletions(-) diff --git a/Lib/distutils/tests/test_bdist_rpm.py b/Lib/distutils/tests/test_bdist_rpm.py --- a/Lib/distutils/tests/test_bdist_rpm.py +++ b/Lib/distutils/tests/test_bdist_rpm.py @@ -47,7 +47,7 @@ # XXX I am unable yet to make this test work without # spurious sdtout/stderr output under Mac OS X - if sys.platform != 'linux2': + if not sys.platform.startswith('linux'): return # this test will run only if the rpm commands are found @@ -87,7 +87,7 @@ # XXX I am unable yet to make this test work without # spurious sdtout/stderr output under Mac OS X - if sys.platform != 'linux2': + if not sys.platform.startswith('linux'): return # http://bugs.python.org/issue1533164 diff --git a/Lib/test/regrtest.py b/Lib/test/regrtest.py --- a/Lib/test/regrtest.py +++ b/Lib/test/regrtest.py @@ -1391,8 +1391,8 @@ # Tests that are expected to be skipped everywhere except on one platform # are also handled separately. 
-_expectations = { - 'win32': +_expectations = ( + ('win32', """ test__locale test_crypt @@ -1420,15 +1420,15 @@ test_threadsignals test_wait3 test_wait4 - """, - 'linux2': + """), + ('linux', """ test_curses test_largefile test_kqueue test_ossaudiodev - """, - 'unixware7': + """), + ('unixware', """ test_epoll test_largefile @@ -1438,8 +1438,8 @@ test_pyexpat test_sax test_sundry - """, - 'openunix8': + """), + ('openunix', """ test_epoll test_largefile @@ -1449,8 +1449,8 @@ test_pyexpat test_sax test_sundry - """, - 'sco_sv3': + """), + ('sco_sv', """ test_asynchat test_fork1 @@ -1469,8 +1469,8 @@ test_threaded_import test_threadedtempfile test_threading - """, - 'darwin': + """), + ('darwin', """ test__locale test_curses @@ -1482,8 +1482,8 @@ test_minidom test_ossaudiodev test_poll - """, - 'sunos5': + """), + ('sunos', """ test_curses test_dbm @@ -1494,8 +1494,8 @@ test_openpty test_zipfile test_zlib - """, - 'hp-ux11': + """), + ('hp-ux', """ test_curses test_epoll @@ -1510,8 +1510,8 @@ test_sax test_zipfile test_zlib - """, - 'cygwin': + """), + ('cygwin', """ test_curses test_dbm @@ -1522,8 +1522,8 @@ test_locale test_ossaudiodev test_socketserver - """, - 'os2emx': + """), + ('os2emx', """ test_audioop test_curses @@ -1536,8 +1536,8 @@ test_pty test_resource test_signal - """, - 'freebsd4': + """), + ('freebsd', """ test_epoll test_dbm_gnu @@ -1553,8 +1553,8 @@ test_timeout test_urllibnet test_multiprocessing - """, - 'aix5': + """), + ('aix', """ test_bz2 test_epoll @@ -1568,8 +1568,8 @@ test_ttk_textonly test_zipimport test_zlib - """, - 'openbsd3': + """), + ('openbsd', """ test_ctypes test_epoll @@ -1583,8 +1583,8 @@ test_ttk_guionly test_ttk_textonly test_multiprocessing - """, - 'netbsd3': + """), + ('netbsd', """ test_ctypes test_curses @@ -1598,12 +1598,8 @@ test_ttk_guionly test_ttk_textonly test_multiprocessing - """, -} -_expectations['freebsd5'] = _expectations['freebsd4'] -_expectations['freebsd6'] = _expectations['freebsd4'] 
-_expectations['freebsd7'] = _expectations['freebsd4'] -_expectations['freebsd8'] = _expectations['freebsd4'] + """), +) class _ExpectedSkips: def __init__(self): @@ -1611,9 +1607,13 @@ from test import test_timeout self.valid = False - if sys.platform in _expectations: - s = _expectations[sys.platform] - self.expected = set(s.split()) + expected = None + for item in _expectations: + if sys.platform.startswith(item[0]): + expected = item[1] + break + if expected is not None: + self.expected = set(expected.split()) # These are broken tests, for now skipped on every platform. # XXX Fix these! diff --git a/Lib/test/test_fcntl.py b/Lib/test/test_fcntl.py --- a/Lib/test/test_fcntl.py +++ b/Lib/test/test_fcntl.py @@ -23,12 +23,9 @@ else: start_len = "qq" - if sys.platform in ('netbsd1', 'netbsd2', 'netbsd3', - 'Darwin1.2', 'darwin', - 'freebsd2', 'freebsd3', 'freebsd4', 'freebsd5', - 'freebsd6', 'freebsd7', 'freebsd8', - 'bsdos2', 'bsdos3', 'bsdos4', - 'openbsd', 'openbsd2', 'openbsd3', 'openbsd4'): + if (any(sys.platform.startswith(prefix) + for prefix in ('netbsd', 'freebsd', 'openbsd', 'bsdos')) + or sys.platform in ('Darwin1.2', 'darwin')): if struct.calcsize('l') == 8: off_t = 'l' pid_t = 'i' diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -527,7 +527,7 @@ def test_builtin_handlers(self): # We can't actually *use* too many handlers in the tests, # but we can try instantiating them with various options - if sys.platform in ('linux2', 'darwin'): + if sys.platform.startswith('linux') or sys.platform == 'darwin': for existing in (True, False): fd, fn = tempfile.mkstemp() os.close(fd) diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -2074,7 +2074,7 @@ ]) if hasattr(socket, "socketpair"): tests.append(BasicSocketPairTest) - if sys.platform == 'linux2': + if sys.platform.startswith('linux'): 
tests.append(TestLinuxAbstractNamespace) if isTipcAvailable(): tests.append(TIPCTest) diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py --- a/Lib/test/test_tarfile.py +++ b/Lib/test/test_tarfile.py @@ -703,7 +703,7 @@ # Return True if the platform knows the st_blocks stat attribute and # uses st_blocks units of 512 bytes, and if the filesystem is able to # store holes in files. - if sys.platform == "linux2": + if sys.platform.startswith("linux"): # Linux evidentially has 512 byte st_blocks units. name = os.path.join(TEMPDIR, "sparse-test") with open(name, "wb") as fobj: diff --git a/Lib/test/test_tempfile.py b/Lib/test/test_tempfile.py --- a/Lib/test/test_tempfile.py +++ b/Lib/test/test_tempfile.py @@ -20,7 +20,7 @@ # TEST_FILES may need to be tweaked for systems depending on the maximum # number of files that can be opened at one time (see ulimit -n) -if sys.platform in ('openbsd3', 'openbsd4'): +if sys.platform.startswith('openbsd'): TEST_FILES = 48 else: TEST_FILES = 100 diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -1381,9 +1381,8 @@ # End multiprocessing # Platform-specific libraries - if (platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6', - 'freebsd7', 'freebsd8') - or platform.startswith("gnukfreebsd")): + if any(platform.startswith(prefix) + for prefix in ("linux", "freebsd", "gnukfreebsd")): exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) ) else: missing.append('ossaudiodev') -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Thu Aug 18 05:27:40 2011 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Thu, 18 Aug 2011 05:27:40 +0200 Subject: [Python-checkins] Daily reference leaks (50f1922bc1d5): sum=0 Message-ID: results for 50f1922bc1d5 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogWsu_4D', '-x'] From python-checkins at python.org Thu Aug 18 
11:12:56 2011 From: python-checkins at python.org (vinay.sajip) Date: Thu, 18 Aug 2011 11:12:56 +0200 Subject: [Python-checkins] =?utf8?q?distutils2=3A_Backported_packaging_fro?= =?utf8?q?m_cpython_default=2C_overwriting_previous_version=2E?= Message-ID: http://hg.python.org/distutils2/rev/e3ec249ee8bc changeset: 1120:e3ec249ee8bc user: Vinay Sajip date: Thu Aug 18 10:11:00 2011 +0100 summary: Backported packaging from cpython default, overwriting previous version. files: distutils2/README | 13 - distutils2/__init__.py | 6 +- distutils2/_trove.py | 1088 +++--- distutils2/command/__init__.py | 10 +- distutils2/command/bdist.py | 29 +- distutils2/command/bdist_dumb.py | 39 +- distutils2/command/bdist_msi.py | 121 +- distutils2/command/bdist_wininst.py | 142 +- distutils2/command/build.py | 10 +- distutils2/command/build_clib.py | 61 +- distutils2/command/build_ext.py | 161 +- distutils2/command/build_py.py | 55 +- distutils2/command/build_scripts.py | 93 +- distutils2/command/check.py | 44 +- distutils2/command/clean.py | 11 +- distutils2/command/cmd.py | 96 +- distutils2/command/command_template | 13 +- distutils2/command/config.py | 100 +- distutils2/command/install_data.py | 48 +- distutils2/command/install_dist.py | 74 +- distutils2/command/install_distinfo.py | 62 +- distutils2/command/install_headers.py | 12 +- distutils2/command/install_lib.py | 38 +- distutils2/command/install_scripts.py | 23 +- distutils2/command/register.py | 111 +- distutils2/command/sdist.py | 109 +- distutils2/command/test.py | 30 +- distutils2/command/upload.py | 140 +- distutils2/command/upload_docs.py | 110 +- distutils2/command/wininst-10.0-amd64.exe | Bin distutils2/command/wininst-10.0.exe | Bin distutils2/compat.py | 20 +- distutils2/compiler/__init__.py | 75 +- distutils2/compiler/bcppcompiler.py | 181 +- distutils2/compiler/ccompiler.py | 192 +- distutils2/compiler/cygwinccompiler.py | 73 +- distutils2/compiler/extension.py | 43 +- distutils2/compiler/msvc9compiler.py | 166 +- 
distutils2/compiler/msvccompiler.py | 282 +- distutils2/compiler/unixccompiler.py | 45 +- distutils2/config.py | 200 +- distutils2/create.py | 689 ++++ distutils2/database.py | 647 ++++ distutils2/depgraph.py | 125 +- distutils2/dist.py | 190 +- distutils2/errors.py | 52 +- distutils2/fancy_getopt.py | 180 +- distutils2/install.py | 391 +- distutils2/manifest.py | 141 +- distutils2/markers.py | 69 +- distutils2/metadata.py | 160 +- distutils2/mkcfg.py | 657 ---- distutils2/index/__init__.py | 6 +- distutils2/index/base.py | 4 +- distutils2/index/dist.py | 87 +- distutils2/index/errors.py | 22 +- distutils2/index/mirrors.py | 8 +- distutils2/index/simple.py | 183 +- distutils2/index/wrapper.py | 18 +- distutils2/index/xmlrpc.py | 41 +- distutils2/pysetup | 5 - distutils2/resources.py | 25 - distutils2/run.py | 568 ++- distutils2/tests/__init__.py | 54 +- distutils2/tests/__main__.py | 23 + distutils2/tests/fake_dists/babar-0.1.dist-info/INSTALLER | Bin distutils2/tests/fake_dists/babar-0.1.dist-info/METADATA | 4 + distutils2/tests/fake_dists/babar-0.1.dist-info/RECORD | Bin distutils2/tests/fake_dists/babar-0.1.dist-info/REQUESTED | Bin distutils2/tests/fake_dists/babar-0.1.dist-info/RESOURCES | 2 + distutils2/tests/fake_dists/babar.cfg | 1 + distutils2/tests/fake_dists/babar.png | Bin distutils2/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO | 6 + distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/PKG-INFO | 18 + distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/SOURCES.txt | Bin distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/dependency_links.txt | 1 + distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/entry_points.txt | 3 + distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/not-zip-safe | 1 + distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/requires.txt | 6 + distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/top_level.txt | Bin distutils2/tests/fake_dists/cheese-2.0.2.egg-info | 5 + distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/INSTALLER | Bin 
distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA | 9 + distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/RECORD | Bin distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/REQUESTED | Bin distutils2/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py | 1 + distutils2/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py | 10 + distutils2/tests/fake_dists/choxie-2.0.0.9/truffles.py | 5 + distutils2/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO | 5 + distutils2/tests/fake_dists/grammar-1.0a4.dist-info/INSTALLER | Bin distutils2/tests/fake_dists/grammar-1.0a4.dist-info/METADATA | 5 + distutils2/tests/fake_dists/grammar-1.0a4.dist-info/RECORD | Bin distutils2/tests/fake_dists/grammar-1.0a4.dist-info/REQUESTED | Bin distutils2/tests/fake_dists/grammar-1.0a4/grammar/__init__.py | 1 + distutils2/tests/fake_dists/grammar-1.0a4/grammar/utils.py | 8 + distutils2/tests/fake_dists/nut-funkyversion.egg-info | 3 + distutils2/tests/fake_dists/strawberry-0.6.egg | Bin distutils2/tests/fake_dists/towel_stuff-0.1.dist-info/INSTALLER | Bin distutils2/tests/fake_dists/towel_stuff-0.1.dist-info/METADATA | 7 + distutils2/tests/fake_dists/towel_stuff-0.1.dist-info/RECORD | Bin distutils2/tests/fake_dists/towel_stuff-0.1.dist-info/REQUESTED | Bin distutils2/tests/fake_dists/towel_stuff-0.1/towel_stuff/__init__.py | 18 + distutils2/tests/fake_dists/truffles-5.0.egg-info | 3 + distutils2/tests/fixer/fix_idioms.py | 18 +- distutils2/tests/pypi_server.py | 96 +- distutils2/tests/pypi_test_server.py | 59 + distutils2/tests/pypiserver/downloads_with_md5/packages/source/f/foobar/foobar-0.1.tar.gz | Bin distutils2/tests/pypiserver/downloads_with_md5/simple/foobar/foobar-0.1.tar.gz | Bin distutils2/tests/pypiserver/downloads_with_md5/simple/foobar/index.html | 2 +- distutils2/tests/support.py | 433 ++- distutils2/tests/test_ccompiler.py | 8 +- distutils2/tests/test_command_bdist.py | 6 +- distutils2/tests/test_command_bdist_dumb.py | 36 +- 
distutils2/tests/test_command_bdist_msi.py | 7 +- distutils2/tests/test_command_bdist_wininst.py | 9 +- distutils2/tests/test_command_build.py | 6 +- distutils2/tests/test_command_build_clib.py | 43 +- distutils2/tests/test_command_build_ext.py | 272 +- distutils2/tests/test_command_build_py.py | 34 +- distutils2/tests/test_command_build_scripts.py | 23 +- distutils2/tests/test_command_check.py | 78 +- distutils2/tests/test_command_clean.py | 15 +- distutils2/tests/test_command_cmd.py | 31 +- distutils2/tests/test_command_config.py | 23 +- distutils2/tests/test_command_install_data.py | 45 +- distutils2/tests/test_command_install_dist.py | 52 +- distutils2/tests/test_command_install_distinfo.py | 52 +- distutils2/tests/test_command_install_headers.py | 5 +- distutils2/tests/test_command_install_lib.py | 47 +- distutils2/tests/test_command_install_scripts.py | 29 +- distutils2/tests/test_command_register.py | 95 +- distutils2/tests/test_command_sdist.py | 177 +- distutils2/tests/test_command_test.py | 105 +- distutils2/tests/test_command_upload.py | 54 +- distutils2/tests/test_command_upload_docs.py | 116 +- distutils2/tests/test_compiler.py | 16 +- distutils2/tests/test_config.py | 248 +- distutils2/tests/test_create.py | 243 + distutils2/tests/test_cygwinccompiler.py | 27 +- distutils2/tests/test_database.py | 674 ++++ distutils2/tests/test_depgraph.py | 124 +- distutils2/tests/test_dist.py | 198 +- distutils2/tests/test_extension.py | 2 +- distutils2/tests/test_index_dist.py | 282 - distutils2/tests/test_index_simple.py | 322 -- distutils2/tests/test_index_xmlrpc.py | 109 - distutils2/tests/test_install.py | 177 +- distutils2/tests/test_manifest.py | 80 +- distutils2/tests/test_markers.py | 16 +- distutils2/tests/test_metadata.py | 75 +- distutils2/tests/test_mixin2to3.py | 90 +- distutils2/tests/test_mkcfg.py | 221 - distutils2/tests/test_msvc9compiler.py | 43 +- distutils2/tests/test_pypi_dist.py | 285 + distutils2/tests/test_pypi_server.py | 80 +- 
distutils2/tests/test_pypi_simple.py | 351 ++ distutils2/tests/test_pypi_xmlrpc.py | 101 + distutils2/tests/test_resources.py | 174 - distutils2/tests/test_run.py | 40 +- distutils2/tests/test_uninstall.py | 99 +- distutils2/tests/test_unixccompiler.py | 25 +- distutils2/tests/test_util.py | 551 +++- distutils2/tests/test_version.py | 19 +- distutils2/tests/xxmodule.c | 379 -- distutils2/util.py | 1450 ++++++++- distutils2/version.py | 16 +- sysconfig.cfg | 111 + sysconfig.py | 766 +++++ sysconfig.pyc | Bin sysconfig.pyo | Bin test_distutils2.py | 5 + 171 files changed, 10808 insertions(+), 7160 deletions(-) diff --git a/distutils2/README b/distutils2/README deleted file mode 100644 --- a/distutils2/README +++ /dev/null @@ -1,13 +0,0 @@ -This directory contains the Distutils2 package. - -There's a full documentation available at: - - http://docs.python.org/distutils/ - -The Distutils-SIG web page is also a good starting point: - - http://www.python.org/sigs/distutils-sig/ - -WARNING: Distutils2 must remain compatible with Python 2.4 - -$Id: README 70017 2009-02-27 12:53:34Z tarek.ziade $ diff --git a/distutils2/__init__.py b/distutils2/__init__.py --- a/distutils2/__init__.py +++ b/distutils2/__init__.py @@ -1,12 +1,14 @@ -"""distutils +"""Support for distutils2, distribution and installation of Python projects. -Third-party tools can use parts of Distutils2 as building blocks +Third-party tools can use parts of distutils2 as building blocks without causing the other modules to be imported: import distutils2.version + import distutils2.metadata import distutils2.pypi.simple import distutils2.tests.pypi_server """ + from logging import getLogger __all__ = ['__version__', 'logger'] diff --git a/distutils2/_trove.py b/distutils2/_trove.py --- a/distutils2/_trove.py +++ b/distutils2/_trove.py @@ -1,4 +1,4 @@ -# Temporary helper for mkcfg. 
+"""Temporary helper for create.""" # XXX get the list from PyPI and cache it instead of hardcoding @@ -6,547 +6,547 @@ # than a list of strings all_classifiers = [ - 'Development Status :: 1 - Planning', - 'Development Status :: 2 - Pre-Alpha', - 'Development Status :: 3 - Alpha', - 'Development Status :: 4 - Beta', - 'Development Status :: 5 - Production/Stable', - 'Development Status :: 6 - Mature', - 'Development Status :: 7 - Inactive', - 'Environment :: Console', - 'Environment :: Console :: Curses', - 'Environment :: Console :: Framebuffer', - 'Environment :: Console :: Newt', - 'Environment :: Console :: svgalib', - "Environment :: Handhelds/PDA's", - 'Environment :: MacOS X', - 'Environment :: MacOS X :: Aqua', - 'Environment :: MacOS X :: Carbon', - 'Environment :: MacOS X :: Cocoa', - 'Environment :: No Input/Output (Daemon)', - 'Environment :: Other Environment', - 'Environment :: Plugins', - 'Environment :: Web Environment', - 'Environment :: Web Environment :: Buffet', - 'Environment :: Web Environment :: Mozilla', - 'Environment :: Web Environment :: ToscaWidgets', - 'Environment :: Win32 (MS Windows)', - 'Environment :: X11 Applications', - 'Environment :: X11 Applications :: Gnome', - 'Environment :: X11 Applications :: GTK', - 'Environment :: X11 Applications :: KDE', - 'Environment :: X11 Applications :: Qt', - 'Framework :: BFG', - 'Framework :: Buildout', - 'Framework :: Chandler', - 'Framework :: CubicWeb', - 'Framework :: Django', - 'Framework :: IDLE', - 'Framework :: Paste', - 'Framework :: Plone', - 'Framework :: Pylons', - 'Framework :: Setuptools Plugin', - 'Framework :: Trac', - 'Framework :: TurboGears', - 'Framework :: TurboGears :: Applications', - 'Framework :: TurboGears :: Widgets', - 'Framework :: Twisted', - 'Framework :: ZODB', - 'Framework :: Zope2', - 'Framework :: Zope3', - 'Intended Audience :: Customer Service', - 'Intended Audience :: Developers', - 'Intended Audience :: Education', - 'Intended Audience :: End 
Users/Desktop', - 'Intended Audience :: Financial and Insurance Industry', - 'Intended Audience :: Healthcare Industry', - 'Intended Audience :: Information Technology', - 'Intended Audience :: Legal Industry', - 'Intended Audience :: Manufacturing', - 'Intended Audience :: Other Audience', - 'Intended Audience :: Religion', - 'Intended Audience :: Science/Research', - 'Intended Audience :: System Administrators', - 'Intended Audience :: Telecommunications Industry', - 'License :: Aladdin Free Public License (AFPL)', - 'License :: DFSG approved', - 'License :: Eiffel Forum License (EFL)', - 'License :: Free For Educational Use', - 'License :: Free For Home Use', - 'License :: Free for non-commercial use', - 'License :: Freely Distributable', - 'License :: Free To Use But Restricted', - 'License :: Freeware', - 'License :: Netscape Public License (NPL)', - 'License :: Nokia Open Source License (NOKOS)', - 'License :: OSI Approved', - 'License :: OSI Approved :: Academic Free License (AFL)', - 'License :: OSI Approved :: Apache Software License', - 'License :: OSI Approved :: Apple Public Source License', - 'License :: OSI Approved :: Artistic License', - 'License :: OSI Approved :: Attribution Assurance License', - 'License :: OSI Approved :: BSD License', - 'License :: OSI Approved :: Common Public License', - 'License :: OSI Approved :: Eiffel Forum License', - 'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)', - 'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)', - 'License :: OSI Approved :: GNU Affero General Public License v3', - 'License :: OSI Approved :: GNU Free Documentation License (FDL)', - 'License :: OSI Approved :: GNU General Public License (GPL)', - 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)', - 'License :: OSI Approved :: IBM Public License', - 'License :: OSI Approved :: Intel Open Source License', - 'License :: OSI Approved :: ISC License (ISCL)', - 'License :: 
OSI Approved :: Jabber Open Source License', - 'License :: OSI Approved :: MIT License', - 'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)', - 'License :: OSI Approved :: Motosoto License', - 'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)', - 'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)', - 'License :: OSI Approved :: Nethack General Public License', - 'License :: OSI Approved :: Nokia Open Source License', - 'License :: OSI Approved :: Open Group Test Suite License', - 'License :: OSI Approved :: Python License (CNRI Python License)', - 'License :: OSI Approved :: Python Software Foundation License', - 'License :: OSI Approved :: Qt Public License (QPL)', - 'License :: OSI Approved :: Ricoh Source Code Public License', - 'License :: OSI Approved :: Sleepycat License', - 'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)', - 'License :: OSI Approved :: Sun Public License', - 'License :: OSI Approved :: University of Illinois/NCSA Open Source License', - 'License :: OSI Approved :: Vovida Software License 1.0', - 'License :: OSI Approved :: W3C License', - 'License :: OSI Approved :: X.Net License', - 'License :: OSI Approved :: zlib/libpng License', - 'License :: OSI Approved :: Zope Public License', - 'License :: Other/Proprietary License', - 'License :: Public Domain', - 'License :: Repoze Public License', - 'Natural Language :: Afrikaans', - 'Natural Language :: Arabic', - 'Natural Language :: Bengali', - 'Natural Language :: Bosnian', - 'Natural Language :: Bulgarian', - 'Natural Language :: Catalan', - 'Natural Language :: Chinese (Simplified)', - 'Natural Language :: Chinese (Traditional)', - 'Natural Language :: Croatian', - 'Natural Language :: Czech', - 'Natural Language :: Danish', - 'Natural Language :: Dutch', - 'Natural Language :: English', - 'Natural Language :: Esperanto', - 'Natural Language :: Finnish', - 'Natural Language :: French', - 'Natural Language :: 
German', - 'Natural Language :: Greek', - 'Natural Language :: Hebrew', - 'Natural Language :: Hindi', - 'Natural Language :: Hungarian', - 'Natural Language :: Icelandic', - 'Natural Language :: Indonesian', - 'Natural Language :: Italian', - 'Natural Language :: Japanese', - 'Natural Language :: Javanese', - 'Natural Language :: Korean', - 'Natural Language :: Latin', - 'Natural Language :: Latvian', - 'Natural Language :: Macedonian', - 'Natural Language :: Malay', - 'Natural Language :: Marathi', - 'Natural Language :: Norwegian', - 'Natural Language :: Panjabi', - 'Natural Language :: Persian', - 'Natural Language :: Polish', - 'Natural Language :: Portuguese', - 'Natural Language :: Portuguese (Brazilian)', - 'Natural Language :: Romanian', - 'Natural Language :: Russian', - 'Natural Language :: Serbian', - 'Natural Language :: Slovak', - 'Natural Language :: Slovenian', - 'Natural Language :: Spanish', - 'Natural Language :: Swedish', - 'Natural Language :: Tamil', - 'Natural Language :: Telugu', - 'Natural Language :: Thai', - 'Natural Language :: Turkish', - 'Natural Language :: Ukranian', - 'Natural Language :: Urdu', - 'Natural Language :: Vietnamese', - 'Operating System :: BeOS', - 'Operating System :: MacOS', - 'Operating System :: MacOS :: MacOS 9', - 'Operating System :: MacOS :: MacOS X', - 'Operating System :: Microsoft', - 'Operating System :: Microsoft :: MS-DOS', - 'Operating System :: Microsoft :: Windows', - 'Operating System :: Microsoft :: Windows :: Windows 3.1 or Earlier', - 'Operating System :: Microsoft :: Windows :: Windows 95/98/2000', - 'Operating System :: Microsoft :: Windows :: Windows CE', - 'Operating System :: Microsoft :: Windows :: Windows NT/2000', - 'Operating System :: OS/2', - 'Operating System :: OS Independent', - 'Operating System :: Other OS', - 'Operating System :: PalmOS', - 'Operating System :: PDA Systems', - 'Operating System :: POSIX', - 'Operating System :: POSIX :: AIX', - 'Operating System :: POSIX :: BSD', - 
'Operating System :: POSIX :: BSD :: BSD/OS', - 'Operating System :: POSIX :: BSD :: FreeBSD', - 'Operating System :: POSIX :: BSD :: NetBSD', - 'Operating System :: POSIX :: BSD :: OpenBSD', - 'Operating System :: POSIX :: GNU Hurd', - 'Operating System :: POSIX :: HP-UX', - 'Operating System :: POSIX :: IRIX', - 'Operating System :: POSIX :: Linux', - 'Operating System :: POSIX :: Other', - 'Operating System :: POSIX :: SCO', - 'Operating System :: POSIX :: SunOS/Solaris', - 'Operating System :: Unix', - 'Programming Language :: Ada', - 'Programming Language :: APL', - 'Programming Language :: ASP', - 'Programming Language :: Assembly', - 'Programming Language :: Awk', - 'Programming Language :: Basic', - 'Programming Language :: C', - 'Programming Language :: C#', - 'Programming Language :: C++', - 'Programming Language :: Cold Fusion', - 'Programming Language :: Cython', - 'Programming Language :: Delphi/Kylix', - 'Programming Language :: Dylan', - 'Programming Language :: Eiffel', - 'Programming Language :: Emacs-Lisp', - 'Programming Language :: Erlang', - 'Programming Language :: Euler', - 'Programming Language :: Euphoria', - 'Programming Language :: Forth', - 'Programming Language :: Fortran', - 'Programming Language :: Haskell', - 'Programming Language :: Java', - 'Programming Language :: JavaScript', - 'Programming Language :: Lisp', - 'Programming Language :: Logo', - 'Programming Language :: ML', - 'Programming Language :: Modula', - 'Programming Language :: Objective C', - 'Programming Language :: Object Pascal', - 'Programming Language :: OCaml', - 'Programming Language :: Other', - 'Programming Language :: Other Scripting Engines', - 'Programming Language :: Pascal', - 'Programming Language :: Perl', - 'Programming Language :: PHP', - 'Programming Language :: Pike', - 'Programming Language :: Pliant', - 'Programming Language :: PL/SQL', - 'Programming Language :: PROGRESS', - 'Programming Language :: Prolog', - 'Programming Language :: Python', - 
'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.3', - 'Programming Language :: Python :: 2.4', - 'Programming Language :: Python :: 2.5', - 'Programming Language :: Python :: 2.6', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.0', - 'Programming Language :: Python :: 3.1', - 'Programming Language :: Python :: 3.2', - 'Programming Language :: REBOL', - 'Programming Language :: Rexx', - 'Programming Language :: Ruby', - 'Programming Language :: Scheme', - 'Programming Language :: Simula', - 'Programming Language :: Smalltalk', - 'Programming Language :: SQL', - 'Programming Language :: Tcl', - 'Programming Language :: Unix Shell', - 'Programming Language :: Visual Basic', - 'Programming Language :: XBasic', - 'Programming Language :: YACC', - 'Programming Language :: Zope', - 'Topic :: Adaptive Technologies', - 'Topic :: Artistic Software', - 'Topic :: Communications', - 'Topic :: Communications :: BBS', - 'Topic :: Communications :: Chat', - 'Topic :: Communications :: Chat :: AOL Instant Messenger', - 'Topic :: Communications :: Chat :: ICQ', - 'Topic :: Communications :: Chat :: Internet Relay Chat', - 'Topic :: Communications :: Chat :: Unix Talk', - 'Topic :: Communications :: Conferencing', - 'Topic :: Communications :: Email', - 'Topic :: Communications :: Email :: Address Book', - 'Topic :: Communications :: Email :: Email Clients (MUA)', - 'Topic :: Communications :: Email :: Filters', - 'Topic :: Communications :: Email :: Mailing List Servers', - 'Topic :: Communications :: Email :: Mail Transport Agents', - 'Topic :: Communications :: Email :: Post-Office', - 'Topic :: Communications :: Email :: Post-Office :: IMAP', - 'Topic :: Communications :: Email :: Post-Office :: POP3', - 'Topic :: Communications :: Fax', - 'Topic :: Communications :: FIDO', - 'Topic :: Communications :: File Sharing', - 'Topic :: Communications :: File Sharing :: Gnutella', - 
'Topic :: Communications :: File Sharing :: Napster', - 'Topic :: Communications :: Ham Radio', - 'Topic :: Communications :: Internet Phone', - 'Topic :: Communications :: Telephony', - 'Topic :: Communications :: Usenet News', - 'Topic :: Database', - 'Topic :: Database :: Database Engines/Servers', - 'Topic :: Database :: Front-Ends', - 'Topic :: Desktop Environment', - 'Topic :: Desktop Environment :: File Managers', - 'Topic :: Desktop Environment :: Gnome', - 'Topic :: Desktop Environment :: GNUstep', - 'Topic :: Desktop Environment :: K Desktop Environment (KDE)', - 'Topic :: Desktop Environment :: K Desktop Environment (KDE) :: Themes', - 'Topic :: Desktop Environment :: PicoGUI', - 'Topic :: Desktop Environment :: PicoGUI :: Applications', - 'Topic :: Desktop Environment :: PicoGUI :: Themes', - 'Topic :: Desktop Environment :: Screen Savers', - 'Topic :: Desktop Environment :: Window Managers', - 'Topic :: Desktop Environment :: Window Managers :: Afterstep', - 'Topic :: Desktop Environment :: Window Managers :: Afterstep :: Themes', - 'Topic :: Desktop Environment :: Window Managers :: Applets', - 'Topic :: Desktop Environment :: Window Managers :: Blackbox', - 'Topic :: Desktop Environment :: Window Managers :: Blackbox :: Themes', - 'Topic :: Desktop Environment :: Window Managers :: CTWM', - 'Topic :: Desktop Environment :: Window Managers :: CTWM :: Themes', - 'Topic :: Desktop Environment :: Window Managers :: Enlightenment', - 'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Epplets', - 'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR15', - 'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR16', - 'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR17', - 'Topic :: Desktop Environment :: Window Managers :: Fluxbox', - 'Topic :: Desktop Environment :: Window Managers :: Fluxbox :: Themes', - 'Topic :: Desktop Environment :: Window Managers :: 
FVWM', - 'Topic :: Desktop Environment :: Window Managers :: FVWM :: Themes', - 'Topic :: Desktop Environment :: Window Managers :: IceWM', - 'Topic :: Desktop Environment :: Window Managers :: IceWM :: Themes', - 'Topic :: Desktop Environment :: Window Managers :: MetaCity', - 'Topic :: Desktop Environment :: Window Managers :: MetaCity :: Themes', - 'Topic :: Desktop Environment :: Window Managers :: Oroborus', - 'Topic :: Desktop Environment :: Window Managers :: Oroborus :: Themes', - 'Topic :: Desktop Environment :: Window Managers :: Sawfish', - 'Topic :: Desktop Environment :: Window Managers :: Sawfish :: Themes 0.30', - 'Topic :: Desktop Environment :: Window Managers :: Sawfish :: Themes pre-0.30', - 'Topic :: Desktop Environment :: Window Managers :: Waimea', - 'Topic :: Desktop Environment :: Window Managers :: Waimea :: Themes', - 'Topic :: Desktop Environment :: Window Managers :: Window Maker', - 'Topic :: Desktop Environment :: Window Managers :: Window Maker :: Applets', - 'Topic :: Desktop Environment :: Window Managers :: Window Maker :: Themes', - 'Topic :: Desktop Environment :: Window Managers :: XFCE', - 'Topic :: Desktop Environment :: Window Managers :: XFCE :: Themes', - 'Topic :: Documentation', - 'Topic :: Education', - 'Topic :: Education :: Computer Aided Instruction (CAI)', - 'Topic :: Education :: Testing', - 'Topic :: Games/Entertainment', - 'Topic :: Games/Entertainment :: Arcade', - 'Topic :: Games/Entertainment :: Board Games', - 'Topic :: Games/Entertainment :: First Person Shooters', - 'Topic :: Games/Entertainment :: Fortune Cookies', - 'Topic :: Games/Entertainment :: Multi-User Dungeons (MUD)', - 'Topic :: Games/Entertainment :: Puzzle Games', - 'Topic :: Games/Entertainment :: Real Time Strategy', - 'Topic :: Games/Entertainment :: Role-Playing', - 'Topic :: Games/Entertainment :: Side-Scrolling/Arcade Games', - 'Topic :: Games/Entertainment :: Simulation', - 'Topic :: Games/Entertainment :: Turn Based Strategy', - 'Topic 
:: Home Automation', - 'Topic :: Internet', - 'Topic :: Internet :: File Transfer Protocol (FTP)', - 'Topic :: Internet :: Finger', - 'Topic :: Internet :: Log Analysis', - 'Topic :: Internet :: Name Service (DNS)', - 'Topic :: Internet :: Proxy Servers', - 'Topic :: Internet :: WAP', - 'Topic :: Internet :: WWW/HTTP', - 'Topic :: Internet :: WWW/HTTP :: Browsers', - 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', - 'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries', - 'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Message Boards', - 'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary', - 'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Page Counters', - 'Topic :: Internet :: WWW/HTTP :: HTTP Servers', - 'Topic :: Internet :: WWW/HTTP :: Indexing/Search', - 'Topic :: Internet :: WWW/HTTP :: Site Management', - 'Topic :: Internet :: WWW/HTTP :: Site Management :: Link Checking', - 'Topic :: Internet :: WWW/HTTP :: WSGI', - 'Topic :: Internet :: WWW/HTTP :: WSGI :: Application', - 'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware', - 'Topic :: Internet :: WWW/HTTP :: WSGI :: Server', - 'Topic :: Internet :: Z39.50', - 'Topic :: Multimedia', - 'Topic :: Multimedia :: Graphics', - 'Topic :: Multimedia :: Graphics :: 3D Modeling', - 'Topic :: Multimedia :: Graphics :: 3D Rendering', - 'Topic :: Multimedia :: Graphics :: Capture', - 'Topic :: Multimedia :: Graphics :: Capture :: Digital Camera', - 'Topic :: Multimedia :: Graphics :: Capture :: Scanners', - 'Topic :: Multimedia :: Graphics :: Capture :: Screen Capture', - 'Topic :: Multimedia :: Graphics :: Editors', - 'Topic :: Multimedia :: Graphics :: Editors :: Raster-Based', - 'Topic :: Multimedia :: Graphics :: Editors :: Vector-Based', - 'Topic :: Multimedia :: Graphics :: Graphics Conversion', - 'Topic :: Multimedia :: Graphics :: Presentation', - 'Topic :: Multimedia :: Graphics :: Viewers', - 'Topic :: Multimedia :: Sound/Audio', - 'Topic :: Multimedia :: 
Sound/Audio :: Analysis', - 'Topic :: Multimedia :: Sound/Audio :: Capture/Recording', - 'Topic :: Multimedia :: Sound/Audio :: CD Audio', - 'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Playing', - 'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Ripping', - 'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Writing', - 'Topic :: Multimedia :: Sound/Audio :: Conversion', - 'Topic :: Multimedia :: Sound/Audio :: Editors', - 'Topic :: Multimedia :: Sound/Audio :: MIDI', - 'Topic :: Multimedia :: Sound/Audio :: Mixers', - 'Topic :: Multimedia :: Sound/Audio :: Players', - 'Topic :: Multimedia :: Sound/Audio :: Players :: MP3', - 'Topic :: Multimedia :: Sound/Audio :: Sound Synthesis', - 'Topic :: Multimedia :: Sound/Audio :: Speech', - 'Topic :: Multimedia :: Video', - 'Topic :: Multimedia :: Video :: Capture', - 'Topic :: Multimedia :: Video :: Conversion', - 'Topic :: Multimedia :: Video :: Display', - 'Topic :: Multimedia :: Video :: Non-Linear Editor', - 'Topic :: Office/Business', - 'Topic :: Office/Business :: Financial', - 'Topic :: Office/Business :: Financial :: Accounting', - 'Topic :: Office/Business :: Financial :: Investment', - 'Topic :: Office/Business :: Financial :: Point-Of-Sale', - 'Topic :: Office/Business :: Financial :: Spreadsheet', - 'Topic :: Office/Business :: Groupware', - 'Topic :: Office/Business :: News/Diary', - 'Topic :: Office/Business :: Office Suites', - 'Topic :: Office/Business :: Scheduling', - 'Topic :: Other/Nonlisted Topic', - 'Topic :: Printing', - 'Topic :: Religion', - 'Topic :: Scientific/Engineering', - 'Topic :: Scientific/Engineering :: Artificial Intelligence', - 'Topic :: Scientific/Engineering :: Astronomy', - 'Topic :: Scientific/Engineering :: Atmospheric Science', - 'Topic :: Scientific/Engineering :: Bio-Informatics', - 'Topic :: Scientific/Engineering :: Chemistry', - 'Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)', - 'Topic :: Scientific/Engineering :: GIS', - 'Topic :: 
Scientific/Engineering :: Human Machine Interfaces', - 'Topic :: Scientific/Engineering :: Image Recognition', - 'Topic :: Scientific/Engineering :: Information Analysis', - 'Topic :: Scientific/Engineering :: Interface Engine/Protocol Translator', - 'Topic :: Scientific/Engineering :: Mathematics', - 'Topic :: Scientific/Engineering :: Medical Science Apps.', - 'Topic :: Scientific/Engineering :: Physics', - 'Topic :: Scientific/Engineering :: Visualization', - 'Topic :: Security', - 'Topic :: Security :: Cryptography', - 'Topic :: Sociology', - 'Topic :: Sociology :: Genealogy', - 'Topic :: Sociology :: History', - 'Topic :: Software Development', - 'Topic :: Software Development :: Assemblers', - 'Topic :: Software Development :: Bug Tracking', - 'Topic :: Software Development :: Build Tools', - 'Topic :: Software Development :: Code Generators', - 'Topic :: Software Development :: Compilers', - 'Topic :: Software Development :: Debuggers', - 'Topic :: Software Development :: Disassemblers', - 'Topic :: Software Development :: Documentation', - 'Topic :: Software Development :: Embedded Systems', - 'Topic :: Software Development :: Internationalization', - 'Topic :: Software Development :: Interpreters', - 'Topic :: Software Development :: Libraries', - 'Topic :: Software Development :: Libraries :: Application Frameworks', - 'Topic :: Software Development :: Libraries :: Java Libraries', - 'Topic :: Software Development :: Libraries :: Perl Modules', - 'Topic :: Software Development :: Libraries :: PHP Classes', - 'Topic :: Software Development :: Libraries :: Pike Modules', - 'Topic :: Software Development :: Libraries :: pygame', - 'Topic :: Software Development :: Libraries :: Python Modules', - 'Topic :: Software Development :: Libraries :: Ruby Modules', - 'Topic :: Software Development :: Libraries :: Tcl Extensions', - 'Topic :: Software Development :: Localization', - 'Topic :: Software Development :: Object Brokering', - 'Topic :: Software Development 
:: Object Brokering :: CORBA', - 'Topic :: Software Development :: Pre-processors', - 'Topic :: Software Development :: Quality Assurance', - 'Topic :: Software Development :: Testing', - 'Topic :: Software Development :: Testing :: Traffic Generation', - 'Topic :: Software Development :: User Interfaces', - 'Topic :: Software Development :: Version Control', - 'Topic :: Software Development :: Version Control :: CVS', - 'Topic :: Software Development :: Version Control :: RCS', - 'Topic :: Software Development :: Version Control :: SCCS', - 'Topic :: Software Development :: Widget Sets', - 'Topic :: System', - 'Topic :: System :: Archiving', - 'Topic :: System :: Archiving :: Backup', - 'Topic :: System :: Archiving :: Compression', - 'Topic :: System :: Archiving :: Mirroring', - 'Topic :: System :: Archiving :: Packaging', - 'Topic :: System :: Benchmark', - 'Topic :: System :: Boot', - 'Topic :: System :: Boot :: Init', - 'Topic :: System :: Clustering', - 'Topic :: System :: Console Fonts', - 'Topic :: System :: Distributed Computing', - 'Topic :: System :: Emulators', - 'Topic :: System :: Filesystems', - 'Topic :: System :: Hardware', - 'Topic :: System :: Hardware :: Hardware Drivers', - 'Topic :: System :: Hardware :: Mainframes', - 'Topic :: System :: Hardware :: Symmetric Multi-processing', - 'Topic :: System :: Installation/Setup', - 'Topic :: System :: Logging', - 'Topic :: System :: Monitoring', - 'Topic :: System :: Networking', - 'Topic :: System :: Networking :: Firewalls', - 'Topic :: System :: Networking :: Monitoring', - 'Topic :: System :: Networking :: Monitoring :: Hardware Watchdog', - 'Topic :: System :: Networking :: Time Synchronization', - 'Topic :: System :: Operating System', - 'Topic :: System :: Operating System Kernels', - 'Topic :: System :: Operating System Kernels :: BSD', - 'Topic :: System :: Operating System Kernels :: GNU Hurd', - 'Topic :: System :: Operating System Kernels :: Linux', - 'Topic :: System :: Power (UPS)', - 
'Topic :: System :: Recovery Tools', - 'Topic :: System :: Shells', - 'Topic :: System :: Software Distribution', - 'Topic :: System :: Systems Administration', - 'Topic :: System :: Systems Administration :: Authentication/Directory', - 'Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP', - 'Topic :: System :: Systems Administration :: Authentication/Directory :: NIS', - 'Topic :: System :: System Shells', - 'Topic :: Terminals', - 'Topic :: Terminals :: Serial', - 'Topic :: Terminals :: Telnet', - 'Topic :: Terminals :: Terminal Emulators/X Terminals', - 'Topic :: Text Editors', - 'Topic :: Text Editors :: Documentation', - 'Topic :: Text Editors :: Emacs', - 'Topic :: Text Editors :: Integrated Development Environments (IDE)', - 'Topic :: Text Editors :: Text Processing', - 'Topic :: Text Editors :: Word Processors', - 'Topic :: Text Processing', - 'Topic :: Text Processing :: Filters', - 'Topic :: Text Processing :: Fonts', - 'Topic :: Text Processing :: General', - 'Topic :: Text Processing :: Indexing', - 'Topic :: Text Processing :: Linguistic', - 'Topic :: Text Processing :: Markup', - 'Topic :: Text Processing :: Markup :: HTML', - 'Topic :: Text Processing :: Markup :: LaTeX', - 'Topic :: Text Processing :: Markup :: SGML', - 'Topic :: Text Processing :: Markup :: VRML', - 'Topic :: Text Processing :: Markup :: XML', - 'Topic :: Utilities', +'Development Status :: 1 - Planning', +'Development Status :: 2 - Pre-Alpha', +'Development Status :: 3 - Alpha', +'Development Status :: 4 - Beta', +'Development Status :: 5 - Production/Stable', +'Development Status :: 6 - Mature', +'Development Status :: 7 - Inactive', +'Environment :: Console', +'Environment :: Console :: Curses', +'Environment :: Console :: Framebuffer', +'Environment :: Console :: Newt', +'Environment :: Console :: svgalib', +"Environment :: Handhelds/PDA's", +'Environment :: MacOS X', +'Environment :: MacOS X :: Aqua', +'Environment :: MacOS X :: Carbon', 
+'Environment :: MacOS X :: Cocoa', +'Environment :: No Input/Output (Daemon)', +'Environment :: Other Environment', +'Environment :: Plugins', +'Environment :: Web Environment', +'Environment :: Web Environment :: Buffet', +'Environment :: Web Environment :: Mozilla', +'Environment :: Web Environment :: ToscaWidgets', +'Environment :: Win32 (MS Windows)', +'Environment :: X11 Applications', +'Environment :: X11 Applications :: Gnome', +'Environment :: X11 Applications :: GTK', +'Environment :: X11 Applications :: KDE', +'Environment :: X11 Applications :: Qt', +'Framework :: BFG', +'Framework :: Buildout', +'Framework :: Chandler', +'Framework :: CubicWeb', +'Framework :: Django', +'Framework :: IDLE', +'Framework :: Paste', +'Framework :: Plone', +'Framework :: Pylons', +'Framework :: Setuptools Plugin', +'Framework :: Trac', +'Framework :: TurboGears', +'Framework :: TurboGears :: Applications', +'Framework :: TurboGears :: Widgets', +'Framework :: Twisted', +'Framework :: ZODB', +'Framework :: Zope2', +'Framework :: Zope3', +'Intended Audience :: Customer Service', +'Intended Audience :: Developers', +'Intended Audience :: Education', +'Intended Audience :: End Users/Desktop', +'Intended Audience :: Financial and Insurance Industry', +'Intended Audience :: Healthcare Industry', +'Intended Audience :: Information Technology', +'Intended Audience :: Legal Industry', +'Intended Audience :: Manufacturing', +'Intended Audience :: Other Audience', +'Intended Audience :: Religion', +'Intended Audience :: Science/Research', +'Intended Audience :: System Administrators', +'Intended Audience :: Telecommunications Industry', +'License :: Aladdin Free Public License (AFPL)', +'License :: DFSG approved', +'License :: Eiffel Forum License (EFL)', +'License :: Free For Educational Use', +'License :: Free For Home Use', +'License :: Free for non-commercial use', +'License :: Freely Distributable', +'License :: Free To Use But Restricted', +'License :: Freeware', +'License :: 
Netscape Public License (NPL)', +'License :: Nokia Open Source License (NOKOS)', +'License :: OSI Approved', +'License :: OSI Approved :: Academic Free License (AFL)', +'License :: OSI Approved :: Apache Software License', +'License :: OSI Approved :: Apple Public Source License', +'License :: OSI Approved :: Artistic License', +'License :: OSI Approved :: Attribution Assurance License', +'License :: OSI Approved :: BSD License', +'License :: OSI Approved :: Common Public License', +'License :: OSI Approved :: Eiffel Forum License', +'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)', +'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)', +'License :: OSI Approved :: GNU Affero General Public License v3', +'License :: OSI Approved :: GNU Free Documentation License (FDL)', +'License :: OSI Approved :: GNU General Public License (GPL)', +'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)', +'License :: OSI Approved :: IBM Public License', +'License :: OSI Approved :: Intel Open Source License', +'License :: OSI Approved :: ISC License (ISCL)', +'License :: OSI Approved :: Jabber Open Source License', +'License :: OSI Approved :: MIT License', +'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)', +'License :: OSI Approved :: Motosoto License', +'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)', +'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)', +'License :: OSI Approved :: Nethack General Public License', +'License :: OSI Approved :: Nokia Open Source License', +'License :: OSI Approved :: Open Group Test Suite License', +'License :: OSI Approved :: Python License (CNRI Python License)', +'License :: OSI Approved :: Python Software Foundation License', +'License :: OSI Approved :: Qt Public License (QPL)', +'License :: OSI Approved :: Ricoh Source Code Public License', +'License :: OSI Approved :: Sleepycat License', +'License :: OSI 
Approved :: Sun Industry Standards Source License (SISSL)', +'License :: OSI Approved :: Sun Public License', +'License :: OSI Approved :: University of Illinois/NCSA Open Source License', +'License :: OSI Approved :: Vovida Software License 1.0', +'License :: OSI Approved :: W3C License', +'License :: OSI Approved :: X.Net License', +'License :: OSI Approved :: zlib/libpng License', +'License :: OSI Approved :: Zope Public License', +'License :: Other/Proprietary License', +'License :: Public Domain', +'License :: Repoze Public License', +'Natural Language :: Afrikaans', +'Natural Language :: Arabic', +'Natural Language :: Bengali', +'Natural Language :: Bosnian', +'Natural Language :: Bulgarian', +'Natural Language :: Catalan', +'Natural Language :: Chinese (Simplified)', +'Natural Language :: Chinese (Traditional)', +'Natural Language :: Croatian', +'Natural Language :: Czech', +'Natural Language :: Danish', +'Natural Language :: Dutch', +'Natural Language :: English', +'Natural Language :: Esperanto', +'Natural Language :: Finnish', +'Natural Language :: French', +'Natural Language :: German', +'Natural Language :: Greek', +'Natural Language :: Hebrew', +'Natural Language :: Hindi', +'Natural Language :: Hungarian', +'Natural Language :: Icelandic', +'Natural Language :: Indonesian', +'Natural Language :: Italian', +'Natural Language :: Japanese', +'Natural Language :: Javanese', +'Natural Language :: Korean', +'Natural Language :: Latin', +'Natural Language :: Latvian', +'Natural Language :: Macedonian', +'Natural Language :: Malay', +'Natural Language :: Marathi', +'Natural Language :: Norwegian', +'Natural Language :: Panjabi', +'Natural Language :: Persian', +'Natural Language :: Polish', +'Natural Language :: Portuguese', +'Natural Language :: Portuguese (Brazilian)', +'Natural Language :: Romanian', +'Natural Language :: Russian', +'Natural Language :: Serbian', +'Natural Language :: Slovak', +'Natural Language :: Slovenian', +'Natural Language :: 
Spanish', +'Natural Language :: Swedish', +'Natural Language :: Tamil', +'Natural Language :: Telugu', +'Natural Language :: Thai', +'Natural Language :: Turkish', +'Natural Language :: Ukranian', +'Natural Language :: Urdu', +'Natural Language :: Vietnamese', +'Operating System :: BeOS', +'Operating System :: MacOS', +'Operating System :: MacOS :: MacOS 9', +'Operating System :: MacOS :: MacOS X', +'Operating System :: Microsoft', +'Operating System :: Microsoft :: MS-DOS', +'Operating System :: Microsoft :: Windows', +'Operating System :: Microsoft :: Windows :: Windows 3.1 or Earlier', +'Operating System :: Microsoft :: Windows :: Windows 95/98/2000', +'Operating System :: Microsoft :: Windows :: Windows CE', +'Operating System :: Microsoft :: Windows :: Windows NT/2000', +'Operating System :: OS/2', +'Operating System :: OS Independent', +'Operating System :: Other OS', +'Operating System :: PalmOS', +'Operating System :: PDA Systems', +'Operating System :: POSIX', +'Operating System :: POSIX :: AIX', +'Operating System :: POSIX :: BSD', +'Operating System :: POSIX :: BSD :: BSD/OS', +'Operating System :: POSIX :: BSD :: FreeBSD', +'Operating System :: POSIX :: BSD :: NetBSD', +'Operating System :: POSIX :: BSD :: OpenBSD', +'Operating System :: POSIX :: GNU Hurd', +'Operating System :: POSIX :: HP-UX', +'Operating System :: POSIX :: IRIX', +'Operating System :: POSIX :: Linux', +'Operating System :: POSIX :: Other', +'Operating System :: POSIX :: SCO', +'Operating System :: POSIX :: SunOS/Solaris', +'Operating System :: Unix', +'Programming Language :: Ada', +'Programming Language :: APL', +'Programming Language :: ASP', +'Programming Language :: Assembly', +'Programming Language :: Awk', +'Programming Language :: Basic', +'Programming Language :: C', +'Programming Language :: C#', +'Programming Language :: C++', +'Programming Language :: Cold Fusion', +'Programming Language :: Cython', +'Programming Language :: Delphi/Kylix', +'Programming Language :: Dylan', 
+'Programming Language :: Eiffel', +'Programming Language :: Emacs-Lisp', +'Programming Language :: Erlang', +'Programming Language :: Euler', +'Programming Language :: Euphoria', +'Programming Language :: Forth', +'Programming Language :: Fortran', +'Programming Language :: Haskell', +'Programming Language :: Java', +'Programming Language :: JavaScript', +'Programming Language :: Lisp', +'Programming Language :: Logo', +'Programming Language :: ML', +'Programming Language :: Modula', +'Programming Language :: Objective C', +'Programming Language :: Object Pascal', +'Programming Language :: OCaml', +'Programming Language :: Other', +'Programming Language :: Other Scripting Engines', +'Programming Language :: Pascal', +'Programming Language :: Perl', +'Programming Language :: PHP', +'Programming Language :: Pike', +'Programming Language :: Pliant', +'Programming Language :: PL/SQL', +'Programming Language :: PROGRESS', +'Programming Language :: Prolog', +'Programming Language :: Python', +'Programming Language :: Python :: 2', +'Programming Language :: Python :: 2.3', +'Programming Language :: Python :: 2.4', +'Programming Language :: Python :: 2.5', +'Programming Language :: Python :: 2.6', +'Programming Language :: Python :: 2.7', +'Programming Language :: Python :: 3', +'Programming Language :: Python :: 3.0', +'Programming Language :: Python :: 3.1', +'Programming Language :: Python :: 3.2', +'Programming Language :: REBOL', +'Programming Language :: Rexx', +'Programming Language :: Ruby', +'Programming Language :: Scheme', +'Programming Language :: Simula', +'Programming Language :: Smalltalk', +'Programming Language :: SQL', +'Programming Language :: Tcl', +'Programming Language :: Unix Shell', +'Programming Language :: Visual Basic', +'Programming Language :: XBasic', +'Programming Language :: YACC', +'Programming Language :: Zope', +'Topic :: Adaptive Technologies', +'Topic :: Artistic Software', +'Topic :: Communications', +'Topic :: Communications :: BBS', 
+'Topic :: Communications :: Chat', +'Topic :: Communications :: Chat :: AOL Instant Messenger', +'Topic :: Communications :: Chat :: ICQ', +'Topic :: Communications :: Chat :: Internet Relay Chat', +'Topic :: Communications :: Chat :: Unix Talk', +'Topic :: Communications :: Conferencing', +'Topic :: Communications :: Email', +'Topic :: Communications :: Email :: Address Book', +'Topic :: Communications :: Email :: Email Clients (MUA)', +'Topic :: Communications :: Email :: Filters', +'Topic :: Communications :: Email :: Mailing List Servers', +'Topic :: Communications :: Email :: Mail Transport Agents', +'Topic :: Communications :: Email :: Post-Office', +'Topic :: Communications :: Email :: Post-Office :: IMAP', +'Topic :: Communications :: Email :: Post-Office :: POP3', +'Topic :: Communications :: Fax', +'Topic :: Communications :: FIDO', +'Topic :: Communications :: File Sharing', +'Topic :: Communications :: File Sharing :: Gnutella', +'Topic :: Communications :: File Sharing :: Napster', +'Topic :: Communications :: Ham Radio', +'Topic :: Communications :: Internet Phone', +'Topic :: Communications :: Telephony', +'Topic :: Communications :: Usenet News', +'Topic :: Database', +'Topic :: Database :: Database Engines/Servers', +'Topic :: Database :: Front-Ends', +'Topic :: Desktop Environment', +'Topic :: Desktop Environment :: File Managers', +'Topic :: Desktop Environment :: Gnome', +'Topic :: Desktop Environment :: GNUstep', +'Topic :: Desktop Environment :: K Desktop Environment (KDE)', +'Topic :: Desktop Environment :: K Desktop Environment (KDE) :: Themes', +'Topic :: Desktop Environment :: PicoGUI', +'Topic :: Desktop Environment :: PicoGUI :: Applications', +'Topic :: Desktop Environment :: PicoGUI :: Themes', +'Topic :: Desktop Environment :: Screen Savers', +'Topic :: Desktop Environment :: Window Managers', +'Topic :: Desktop Environment :: Window Managers :: Afterstep', +'Topic :: Desktop Environment :: Window Managers :: Afterstep :: Themes', 
+'Topic :: Desktop Environment :: Window Managers :: Applets', +'Topic :: Desktop Environment :: Window Managers :: Blackbox', +'Topic :: Desktop Environment :: Window Managers :: Blackbox :: Themes', +'Topic :: Desktop Environment :: Window Managers :: CTWM', +'Topic :: Desktop Environment :: Window Managers :: CTWM :: Themes', +'Topic :: Desktop Environment :: Window Managers :: Enlightenment', +'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Epplets', +'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR15', +'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR16', +'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR17', +'Topic :: Desktop Environment :: Window Managers :: Fluxbox', +'Topic :: Desktop Environment :: Window Managers :: Fluxbox :: Themes', +'Topic :: Desktop Environment :: Window Managers :: FVWM', +'Topic :: Desktop Environment :: Window Managers :: FVWM :: Themes', +'Topic :: Desktop Environment :: Window Managers :: IceWM', +'Topic :: Desktop Environment :: Window Managers :: IceWM :: Themes', +'Topic :: Desktop Environment :: Window Managers :: MetaCity', +'Topic :: Desktop Environment :: Window Managers :: MetaCity :: Themes', +'Topic :: Desktop Environment :: Window Managers :: Oroborus', +'Topic :: Desktop Environment :: Window Managers :: Oroborus :: Themes', +'Topic :: Desktop Environment :: Window Managers :: Sawfish', +'Topic :: Desktop Environment :: Window Managers :: Sawfish :: Themes 0.30', +'Topic :: Desktop Environment :: Window Managers :: Sawfish :: Themes pre-0.30', +'Topic :: Desktop Environment :: Window Managers :: Waimea', +'Topic :: Desktop Environment :: Window Managers :: Waimea :: Themes', +'Topic :: Desktop Environment :: Window Managers :: Window Maker', +'Topic :: Desktop Environment :: Window Managers :: Window Maker :: Applets', +'Topic :: Desktop Environment :: Window Managers :: Window Maker :: Themes', +'Topic :: 
Desktop Environment :: Window Managers :: XFCE', +'Topic :: Desktop Environment :: Window Managers :: XFCE :: Themes', +'Topic :: Documentation', +'Topic :: Education', +'Topic :: Education :: Computer Aided Instruction (CAI)', +'Topic :: Education :: Testing', +'Topic :: Games/Entertainment', +'Topic :: Games/Entertainment :: Arcade', +'Topic :: Games/Entertainment :: Board Games', +'Topic :: Games/Entertainment :: First Person Shooters', +'Topic :: Games/Entertainment :: Fortune Cookies', +'Topic :: Games/Entertainment :: Multi-User Dungeons (MUD)', +'Topic :: Games/Entertainment :: Puzzle Games', +'Topic :: Games/Entertainment :: Real Time Strategy', +'Topic :: Games/Entertainment :: Role-Playing', +'Topic :: Games/Entertainment :: Side-Scrolling/Arcade Games', +'Topic :: Games/Entertainment :: Simulation', +'Topic :: Games/Entertainment :: Turn Based Strategy', +'Topic :: Home Automation', +'Topic :: Internet', +'Topic :: Internet :: File Transfer Protocol (FTP)', +'Topic :: Internet :: Finger', +'Topic :: Internet :: Log Analysis', +'Topic :: Internet :: Name Service (DNS)', +'Topic :: Internet :: Proxy Servers', +'Topic :: Internet :: WAP', +'Topic :: Internet :: WWW/HTTP', +'Topic :: Internet :: WWW/HTTP :: Browsers', +'Topic :: Internet :: WWW/HTTP :: Dynamic Content', +'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries', +'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Message Boards', +'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary', +'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Page Counters', +'Topic :: Internet :: WWW/HTTP :: HTTP Servers', +'Topic :: Internet :: WWW/HTTP :: Indexing/Search', +'Topic :: Internet :: WWW/HTTP :: Site Management', +'Topic :: Internet :: WWW/HTTP :: Site Management :: Link Checking', +'Topic :: Internet :: WWW/HTTP :: WSGI', +'Topic :: Internet :: WWW/HTTP :: WSGI :: Application', +'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware', +'Topic :: Internet :: WWW/HTTP :: 
WSGI :: Server', +'Topic :: Internet :: Z39.50', +'Topic :: Multimedia', +'Topic :: Multimedia :: Graphics', +'Topic :: Multimedia :: Graphics :: 3D Modeling', +'Topic :: Multimedia :: Graphics :: 3D Rendering', +'Topic :: Multimedia :: Graphics :: Capture', +'Topic :: Multimedia :: Graphics :: Capture :: Digital Camera', +'Topic :: Multimedia :: Graphics :: Capture :: Scanners', +'Topic :: Multimedia :: Graphics :: Capture :: Screen Capture', +'Topic :: Multimedia :: Graphics :: Editors', +'Topic :: Multimedia :: Graphics :: Editors :: Raster-Based', +'Topic :: Multimedia :: Graphics :: Editors :: Vector-Based', +'Topic :: Multimedia :: Graphics :: Graphics Conversion', +'Topic :: Multimedia :: Graphics :: Presentation', +'Topic :: Multimedia :: Graphics :: Viewers', +'Topic :: Multimedia :: Sound/Audio', +'Topic :: Multimedia :: Sound/Audio :: Analysis', +'Topic :: Multimedia :: Sound/Audio :: Capture/Recording', +'Topic :: Multimedia :: Sound/Audio :: CD Audio', +'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Playing', +'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Ripping', +'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Writing', +'Topic :: Multimedia :: Sound/Audio :: Conversion', +'Topic :: Multimedia :: Sound/Audio :: Editors', +'Topic :: Multimedia :: Sound/Audio :: MIDI', +'Topic :: Multimedia :: Sound/Audio :: Mixers', +'Topic :: Multimedia :: Sound/Audio :: Players', +'Topic :: Multimedia :: Sound/Audio :: Players :: MP3', +'Topic :: Multimedia :: Sound/Audio :: Sound Synthesis', +'Topic :: Multimedia :: Sound/Audio :: Speech', +'Topic :: Multimedia :: Video', +'Topic :: Multimedia :: Video :: Capture', +'Topic :: Multimedia :: Video :: Conversion', +'Topic :: Multimedia :: Video :: Display', +'Topic :: Multimedia :: Video :: Non-Linear Editor', +'Topic :: Office/Business', +'Topic :: Office/Business :: Financial', +'Topic :: Office/Business :: Financial :: Accounting', +'Topic :: Office/Business :: Financial :: Investment', 
+'Topic :: Office/Business :: Financial :: Point-Of-Sale', +'Topic :: Office/Business :: Financial :: Spreadsheet', +'Topic :: Office/Business :: Groupware', +'Topic :: Office/Business :: News/Diary', +'Topic :: Office/Business :: Office Suites', +'Topic :: Office/Business :: Scheduling', +'Topic :: Other/Nonlisted Topic', +'Topic :: Printing', +'Topic :: Religion', +'Topic :: Scientific/Engineering', +'Topic :: Scientific/Engineering :: Artificial Intelligence', +'Topic :: Scientific/Engineering :: Astronomy', +'Topic :: Scientific/Engineering :: Atmospheric Science', +'Topic :: Scientific/Engineering :: Bio-Informatics', +'Topic :: Scientific/Engineering :: Chemistry', +'Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)', +'Topic :: Scientific/Engineering :: GIS', +'Topic :: Scientific/Engineering :: Human Machine Interfaces', +'Topic :: Scientific/Engineering :: Image Recognition', +'Topic :: Scientific/Engineering :: Information Analysis', +'Topic :: Scientific/Engineering :: Interface Engine/Protocol Translator', +'Topic :: Scientific/Engineering :: Mathematics', +'Topic :: Scientific/Engineering :: Medical Science Apps.', +'Topic :: Scientific/Engineering :: Physics', +'Topic :: Scientific/Engineering :: Visualization', +'Topic :: Security', +'Topic :: Security :: Cryptography', +'Topic :: Sociology', +'Topic :: Sociology :: Genealogy', +'Topic :: Sociology :: History', +'Topic :: Software Development', +'Topic :: Software Development :: Assemblers', +'Topic :: Software Development :: Bug Tracking', +'Topic :: Software Development :: Build Tools', +'Topic :: Software Development :: Code Generators', +'Topic :: Software Development :: Compilers', +'Topic :: Software Development :: Debuggers', +'Topic :: Software Development :: Disassemblers', +'Topic :: Software Development :: Documentation', +'Topic :: Software Development :: Embedded Systems', +'Topic :: Software Development :: Internationalization', +'Topic :: Software Development :: 
Interpreters', +'Topic :: Software Development :: Libraries', +'Topic :: Software Development :: Libraries :: Application Frameworks', +'Topic :: Software Development :: Libraries :: Java Libraries', +'Topic :: Software Development :: Libraries :: Perl Modules', +'Topic :: Software Development :: Libraries :: PHP Classes', +'Topic :: Software Development :: Libraries :: Pike Modules', +'Topic :: Software Development :: Libraries :: pygame', +'Topic :: Software Development :: Libraries :: Python Modules', +'Topic :: Software Development :: Libraries :: Ruby Modules', +'Topic :: Software Development :: Libraries :: Tcl Extensions', +'Topic :: Software Development :: Localization', +'Topic :: Software Development :: Object Brokering', +'Topic :: Software Development :: Object Brokering :: CORBA', +'Topic :: Software Development :: Pre-processors', +'Topic :: Software Development :: Quality Assurance', +'Topic :: Software Development :: Testing', +'Topic :: Software Development :: Testing :: Traffic Generation', +'Topic :: Software Development :: User Interfaces', +'Topic :: Software Development :: Version Control', +'Topic :: Software Development :: Version Control :: CVS', +'Topic :: Software Development :: Version Control :: RCS', +'Topic :: Software Development :: Version Control :: SCCS', +'Topic :: Software Development :: Widget Sets', +'Topic :: System', +'Topic :: System :: Archiving', +'Topic :: System :: Archiving :: Backup', +'Topic :: System :: Archiving :: Compression', +'Topic :: System :: Archiving :: Mirroring', +'Topic :: System :: Archiving :: Packaging', +'Topic :: System :: Benchmark', +'Topic :: System :: Boot', +'Topic :: System :: Boot :: Init', +'Topic :: System :: Clustering', +'Topic :: System :: Console Fonts', +'Topic :: System :: Distributed Computing', +'Topic :: System :: Emulators', +'Topic :: System :: Filesystems', +'Topic :: System :: Hardware', +'Topic :: System :: Hardware :: Hardware Drivers', +'Topic :: System :: Hardware :: 
Mainframes', +'Topic :: System :: Hardware :: Symmetric Multi-processing', +'Topic :: System :: Installation/Setup', +'Topic :: System :: Logging', +'Topic :: System :: Monitoring', +'Topic :: System :: Networking', +'Topic :: System :: Networking :: Firewalls', +'Topic :: System :: Networking :: Monitoring', +'Topic :: System :: Networking :: Monitoring :: Hardware Watchdog', +'Topic :: System :: Networking :: Time Synchronization', +'Topic :: System :: Operating System', +'Topic :: System :: Operating System Kernels', +'Topic :: System :: Operating System Kernels :: BSD', +'Topic :: System :: Operating System Kernels :: GNU Hurd', +'Topic :: System :: Operating System Kernels :: Linux', +'Topic :: System :: Power (UPS)', +'Topic :: System :: Recovery Tools', +'Topic :: System :: Shells', +'Topic :: System :: Software Distribution', +'Topic :: System :: Systems Administration', +'Topic :: System :: Systems Administration :: Authentication/Directory', +'Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP', +'Topic :: System :: Systems Administration :: Authentication/Directory :: NIS', +'Topic :: System :: System Shells', +'Topic :: Terminals', +'Topic :: Terminals :: Serial', +'Topic :: Terminals :: Telnet', +'Topic :: Terminals :: Terminal Emulators/X Terminals', +'Topic :: Text Editors', +'Topic :: Text Editors :: Documentation', +'Topic :: Text Editors :: Emacs', +'Topic :: Text Editors :: Integrated Development Environments (IDE)', +'Topic :: Text Editors :: Text Processing', +'Topic :: Text Editors :: Word Processors', +'Topic :: Text Processing', +'Topic :: Text Processing :: Filters', +'Topic :: Text Processing :: Fonts', +'Topic :: Text Processing :: General', +'Topic :: Text Processing :: Indexing', +'Topic :: Text Processing :: Linguistic', +'Topic :: Text Processing :: Markup', +'Topic :: Text Processing :: Markup :: HTML', +'Topic :: Text Processing :: Markup :: LaTeX', +'Topic :: Text Processing :: Markup :: SGML', +'Topic :: 
Text Processing :: Markup :: VRML', +'Topic :: Text Processing :: Markup :: XML', +'Topic :: Utilities', ] diff --git a/distutils2/command/__init__.py b/distutils2/command/__init__.py --- a/distutils2/command/__init__.py +++ b/distutils2/command/__init__.py @@ -1,8 +1,6 @@ -"""distutils.command +"""Subpackage containing all standard commands.""" -Package containing implementation of all the standard Distutils -commands.""" -from distutils2.errors import DistutilsModuleError +from distutils2.errors import PackagingModuleError from distutils2.util import resolve_name __all__ = ['get_command_names', 'set_command', 'get_command_class', @@ -50,9 +48,9 @@ """Return the registered command""" try: cls = _COMMANDS[name] - if isinstance(cls, str): + if isinstance(cls, basestring): cls = resolve_name(cls) _COMMANDS[name] = cls return cls except KeyError: - raise DistutilsModuleError("Invalid command %s" % name) + raise PackagingModuleError("Invalid command %s" % name) diff --git a/distutils2/command/bdist.py b/distutils2/command/bdist.py --- a/distutils2/command/bdist.py +++ b/distutils2/command/bdist.py @@ -1,12 +1,15 @@ -"""distutils.command.bdist +"""Create a built (binary) distribution. -Implements the Distutils 'bdist' command (create a built [binary] -distribution).""" +If a --formats option was given on the command line, this command will +call the corresponding bdist_* commands; if the option was absent, a +bdist_* command depending on the current platform will be called. +""" + import os from distutils2 import util from distutils2.command.cmd import Command -from distutils2.errors import DistutilsPlatformError, DistutilsOptionError +from distutils2.errors import PackagingPlatformError, PackagingOptionError def show_formats(): @@ -52,8 +55,10 @@ "lists available distribution formats", show_formats), ] - # This won't do in reality: will need to distinguish RPM-ish Linux, - # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS. 
+ # This is of course very simplistic. The various UNIX family operating + # systems have their specific formats, but they are out of scope for us; + # bdist_dumb is, well, dumb; it's more a building block for other + # distutils2 tools than a real end-user binary format. default_format = {'posix': 'gztar', 'nt': 'zip', 'os2': 'zip'} @@ -79,7 +84,7 @@ self.plat_name = None self.formats = None self.dist_dir = None - self.skip_build = 0 + self.skip_build = False self.group = None self.owner = None @@ -104,9 +109,8 @@ try: self.formats = [self.default_format[os.name]] except KeyError: - raise DistutilsPlatformError, \ - "don't know how to create built distributions " + \ - "on platform %s" % os.name + raise PackagingPlatformError("don't know how to create built distributions " + \ + "on platform %s" % os.name) if self.dist_dir is None: self.dist_dir = "dist" @@ -118,12 +122,13 @@ try: commands.append(self.format_command[format][0]) except KeyError: - raise DistutilsOptionError, "invalid format '%s'" % format + raise PackagingOptionError("invalid format '%s'" % format) # Reinitialize and run each command. for i in range(len(self.formats)): cmd_name = commands[i] sub_cmd = self.get_reinitialized_command(cmd_name) + sub_cmd.format = self.formats[i] # passing the owner and group names for tar archiving if cmd_name == 'bdist_dumb': @@ -133,5 +138,5 @@ # If we're going to need to run this command again, tell it to # keep its temporary files around so subsequent runs go faster. if cmd_name in commands[i+1:]: - sub_cmd.keep_temp = 1 + sub_cmd.keep_temp = True self.run_command(cmd_name) diff --git a/distutils2/command/bdist_dumb.py b/distutils2/command/bdist_dumb.py --- a/distutils2/command/bdist_dumb.py +++ b/distutils2/command/bdist_dumb.py @@ -1,22 +1,19 @@ -"""distutils.command.bdist_dumb +"""Create a "dumb" built distribution. 
-Implements the Distutils 'bdist_dumb' command (create a "dumb" built -distribution -- i.e., just an archive to be unpacked under $prefix or -$exec_prefix).""" - +A dumb distribution is just an archive meant to be unpacked under +sys.prefix or sys.exec_prefix. +""" import os + from shutil import rmtree -try: - from sysconfig import get_python_version -except ImportError: - from distutils2._backport.sysconfig import get_python_version +from sysconfig import get_python_version from distutils2.util import get_platform from distutils2.command.cmd import Command -from distutils2.errors import DistutilsPlatformError +from distutils2.errors import PackagingPlatformError from distutils2 import logger -class bdist_dumb (Command): +class bdist_dumb(Command): description = 'create a "dumb" built distribution' @@ -52,14 +49,14 @@ 'os2': 'zip' } - def initialize_options (self): + def initialize_options(self): self.bdist_dir = None self.plat_name = None self.format = None - self.keep_temp = 0 + self.keep_temp = False self.dist_dir = None - self.skip_build = 0 - self.relative = 0 + self.skip_build = False + self.relative = False self.owner = None self.group = None @@ -72,9 +69,8 @@ try: self.format = self.default_format[os.name] except KeyError: - raise DistutilsPlatformError, \ - ("don't know how to create dumb built distributions " + - "on platform %s") % os.name + raise PackagingPlatformError(("don't know how to create dumb built distributions " + + "on platform %s") % os.name) self.set_undefined_options('bdist', 'dist_dir', 'plat_name') @@ -82,10 +78,11 @@ if not self.skip_build: self.run_command('build') - install = self.get_reinitialized_command('install_dist', reinit_subcommands=1) + install = self.get_reinitialized_command('install_dist', + reinit_subcommands=True) install.root = self.bdist_dir install.skip_build = self.skip_build - install.warn_dir = 0 + install.warn_dir = False logger.info("installing to %s", self.bdist_dir) self.run_command('install_dist') @@ -106,7 
+103,7 @@ else: if (self.distribution.has_ext_modules() and (install.install_base != install.install_platbase)): - raise DistutilsPlatformError( + raise PackagingPlatformError( "can't make a dumb built distribution where base and " "platbase are different (%r, %r)" % (install.install_base, install.install_platbase)) diff --git a/distutils2/command/bdist_msi.py b/distutils2/command/bdist_msi.py --- a/distutils2/command/bdist_msi.py +++ b/distutils2/command/bdist_msi.py @@ -1,25 +1,33 @@ -# -*- coding: iso-8859-1 -*- -# Copyright (C) 2005, 2006 Martin von L?wis +"""Create a Microsoft Installer (.msi) binary distribution.""" + +# Copyright (C) 2005, 2006 Martin von L??wis # Licensed to PSF under a Contributor Agreement. -# The bdist_wininst command proper -# based on bdist_wininst -""" -Implements the bdist_msi command. -""" -import sys, os + +import sys +import os +import msilib + + from sysconfig import get_python_version - -from distutils2.core import Command -from distutils2.version import StrictVersion -from distutils2.errors import DistutilsOptionError -from distutils2 import log +from shutil import rmtree +from distutils2.command.cmd import Command +from distutils2.version import NormalizedVersion +from distutils2.errors import PackagingOptionError +from distutils2 import logger as log from distutils2.util import get_platform -from distutils2._backport.shutil import rmtree - -import msilib from msilib import schema, sequence, text from msilib import Directory, Feature, Dialog, add_data +class MSIVersion(NormalizedVersion): + """ + MSI ProductVersion must be strictly numeric. + MSIVersion disallows prerelease and postrelease versions. + """ + def __init__(self, *args, **kwargs): + super(MSIVersion, self).__init__(*args, **kwargs) + if not self.is_final: + raise ValueError("ProductVersion must be strictly numeric") + class PyDialog(Dialog): """Dialog class with a fixed layout: controls at the top, then a ruler, then a list of buttons: back, next, cancel. 
Optionally a bitmap at the @@ -81,7 +89,7 @@ Return the button, so that events can be associated""" return self.pushbutton(name, int(self.w*xpos - 28), self.h-27, 56, 17, 3, title, next) -class bdist_msi (Command): +class bdist_msi(Command): description = "create a Microsoft Installer (.msi) binary distribution" @@ -123,20 +131,20 @@ '3.5', '3.6', '3.7', '3.8', '3.9'] other_version = 'X' - def initialize_options (self): + def initialize_options(self): self.bdist_dir = None self.plat_name = None - self.keep_temp = 0 - self.no_target_compile = 0 - self.no_target_optimize = 0 + self.keep_temp = False + self.no_target_compile = False + self.no_target_optimize = False self.target_version = None self.dist_dir = None - self.skip_build = 0 + self.skip_build = False self.install_script = None self.pre_install_script = None self.versions = None - def finalize_options (self): + def finalize_options(self): if self.bdist_dir is None: bdist_base = self.get_finalized_command('bdist').bdist_base self.bdist_dir = os.path.join(bdist_base, 'msi') @@ -147,41 +155,39 @@ self.versions = [self.target_version] if not self.skip_build and self.distribution.has_ext_modules()\ and self.target_version != short_version: - raise DistutilsOptionError, \ - "target version can only be %s, or the '--skip-build'" \ - " option must be specified" % (short_version,) + raise PackagingOptionError("target version can only be %s, or the '--skip-build'" \ + " option must be specified" % (short_version,)) else: self.versions = list(self.all_versions) self.set_undefined_options('bdist', 'dist_dir', 'plat_name') if self.pre_install_script: - raise DistutilsOptionError, "the pre-install-script feature is not yet implemented" + raise PackagingOptionError("the pre-install-script feature is not yet implemented") if self.install_script: for script in self.distribution.scripts: if self.install_script == os.path.basename(script): break else: - raise DistutilsOptionError, \ - "install_script '%s' not found in scripts" 
% \ - self.install_script + raise PackagingOptionError("install_script '%s' not found in scripts" % \ + self.install_script) self.install_script_key = None - # finalize_options() - def run (self): + def run(self): if not self.skip_build: self.run_command('build') - install = self.get_reinitialized_command('install_dist', reinit_subcommands=1) + install = self.get_reinitialized_command('install_dist', + reinit_subcommands=True) install.prefix = self.bdist_dir install.skip_build = self.skip_build - install.warn_dir = 0 + install.warn_dir = False install_lib = self.get_reinitialized_command('install_lib') # we do not want to include pyc or pyo files - install_lib.compile = 0 + install_lib.compile = False install_lib.optimize = 0 if self.distribution.has_ext_modules(): @@ -223,10 +229,7 @@ author = metadata.maintainer if not author: author = "UNKNOWN" - version = metadata.get_version() - # ProductVersion must be strictly numeric - # XXX need to deal with prerelease versions - sversion = "%d.%d.%d" % StrictVersion(version).version + version = MSIVersion(metadata.get_version()) # Prefix ProductName with Python x.y, so that # it sorts together with the other Python packages # in Add-Remove-Programs (APR) @@ -237,7 +240,7 @@ product_name = "Python %s" % (fullname) self.db = msilib.init_database(installer_name, schema, product_name, msilib.gen_uuid(), - sversion, author) + str(version), author) msilib.add_tables(self.db, sequence) props = [('DistVersion', version)] email = metadata.author_email or metadata.maintainer_email @@ -307,7 +310,7 @@ key = seen[afile] = dir.add_file(file) if file==self.install_script: if self.install_script_key: - raise DistutilsOptionError( + raise PackagingOptionError( "Multiple files with name %s" % file) self.install_script_key = '[#%s]' % key else: @@ -387,27 +390,27 @@ # entries for each version as the above code does if self.pre_install_script: scriptfn = os.path.join(self.bdist_dir, "preinstall.bat") - f = open(scriptfn, "w") - # The batch 
file will be executed with [PYTHON], so that %1 - # is the path to the Python interpreter; %0 will be the path - # of the batch file. - # rem =""" - # %1 %0 - # exit - # """ - # - f.write('rem ="""\n%1 %0\nexit\n"""\n') - f.write(open(self.pre_install_script).read()) - f.close() + with open(scriptfn, "w") as f: + # The batch file will be executed with [PYTHON], so that %1 + # is the path to the Python interpreter; %0 will be the path + # of the batch file. + # rem =""" + # %1 %0 + # exit + # """ + # + f.write('rem ="""\n%1 %0\nexit\n"""\n') + with open(self.pre_install_script) as fp: + f.write(fp.read()) add_data(self.db, "Binary", - [("PreInstall", msilib.Binary(scriptfn)) - ]) + [("PreInstall", msilib.Binary(scriptfn)), + ]) add_data(self.db, "CustomAction", - [("PreInstall", 2, "PreInstall", None) - ]) + [("PreInstall", 2, "PreInstall", None), + ]) add_data(self.db, "InstallExecuteSequence", - [("PreInstall", "NOT Installed", 450)]) - + [("PreInstall", "NOT Installed", 450), + ]) def add_ui(self): db = self.db diff --git a/distutils2/command/bdist_wininst.py b/distutils2/command/bdist_wininst.py --- a/distutils2/command/bdist_wininst.py +++ b/distutils2/command/bdist_wininst.py @@ -1,25 +1,21 @@ -"""distutils.command.bdist_wininst +"""Create an executable installer for Windows.""" -Implements the Distutils 'bdist_wininst' command: create a windows installer -exe-program.""" - +# FIXME synchronize bytes/str use with same file in distutils import sys import os -import string + from shutil import rmtree -try: - from sysconfig import get_python_version -except ImportError: - from distutils2._backport.sysconfig import get_python_version +from sysconfig import get_python_version from distutils2.command.cmd import Command -from distutils2.errors import DistutilsOptionError, DistutilsPlatformError +from distutils2.errors import PackagingOptionError, PackagingPlatformError from distutils2 import logger from distutils2.util import get_platform -class bdist_wininst 
(Command): - description = "create an executable installer for MS Windows" +class bdist_wininst(Command): + + description = "create an executable installer for Windows" user_options = [('bdist-dir=', None, "temporary directory for creating the distribution"), @@ -61,25 +57,23 @@ boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize', 'skip-build'] - def initialize_options (self): + def initialize_options(self): self.bdist_dir = None self.plat_name = None - self.keep_temp = 0 - self.no_target_compile = 0 - self.no_target_optimize = 0 + self.keep_temp = False + self.no_target_compile = False + self.no_target_optimize = False self.target_version = None self.dist_dir = None self.bitmap = None self.title = None - self.skip_build = 0 + self.skip_build = False self.install_script = None self.pre_install_script = None self.user_access_control = None - # initialize_options() - - def finalize_options (self): + def finalize_options(self): if self.bdist_dir is None: if self.skip_build and self.plat_name: # If build is skipped and plat_name is overridden, bdist will @@ -94,9 +88,8 @@ if not self.skip_build and self.distribution.has_ext_modules(): short_version = get_python_version() if self.target_version and self.target_version != short_version: - raise DistutilsOptionError, \ - "target version can only be %s, or the '--skip-build'" \ - " option must be specified" % (short_version,) + raise PackagingOptionError("target version can only be %s, or the '--skip-build'" \ + " option must be specified" % (short_version,)) self.target_version = short_version self.set_undefined_options('bdist', 'dist_dir', 'plat_name') @@ -106,32 +99,30 @@ if self.install_script == os.path.basename(script): break else: - raise DistutilsOptionError, \ - "install_script '%s' not found in scripts" % \ - self.install_script - # finalize_options() + raise PackagingOptionError("install_script '%s' not found in scripts" % \ + self.install_script) - - def run (self): + def run(self): if 
(sys.platform != "win32" and (self.distribution.has_ext_modules() or self.distribution.has_c_libraries())): - raise DistutilsPlatformError \ + raise PackagingPlatformError \ ("distribution contains extensions and/or C libraries; " "must be compiled on a Windows 32 platform") if not self.skip_build: self.run_command('build') - install = self.get_reinitialized_command('install', reinit_subcommands=1) + install = self.get_reinitialized_command('install', + reinit_subcommands=True) install.root = self.bdist_dir install.skip_build = self.skip_build - install.warn_dir = 0 + install.warn_dir = False install.plat_name = self.plat_name install_lib = self.get_reinitialized_command('install_lib') # we do not want to include pyc or pyo files - install_lib.compile = 0 + install_lib.compile = False install_lib.optimize = 0 if self.distribution.has_ext_modules(): @@ -153,7 +144,7 @@ # Use a custom scheme for the zip-file, because we have to decide # at installation time which scheme to use. for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'): - value = string.upper(key) + value = key.upper() if key == 'headers': value = value + '/Include/$dist_name' setattr(install, @@ -196,7 +187,7 @@ else: rmtree(self.bdist_dir) - def get_inidata (self): + def get_inidata(self): # Return data describing the installation. 
lines = [] @@ -211,14 +202,14 @@ # Escape newline characters def escape(s): - return string.replace(s, "\n", "\\n") + return s.replace("\n", "\\n") for name in ["author", "author_email", "description", "maintainer", "maintainer_email", "name", "url", "version"]: data = getattr(metadata, name, "") if data: info = info + ("\n %s: %s" % \ - (string.capitalize(name), escape(data))) + (name.capitalize(), escape(data))) lines.append("%s=%s" % (name, escape(data))) # The [setup] section contains entries controlling @@ -241,11 +232,9 @@ build_info = "Built %s with distutils2-%s" % \ (time.ctime(time.time()), distutils2.__version__) lines.append("build_info=%s" % build_info) - return string.join(lines, "\n") + return "\n".join(lines) - # get_inidata() - - def create_exe (self, arcname, fullname, bitmap=None): + def create_exe(self, arcname, fullname, bitmap=None): import struct self.mkpath(self.dist_dir) @@ -253,52 +242,48 @@ cfgdata = self.get_inidata() installer_name = self.get_installer_filename(fullname) - self.announce("creating %s" % installer_name) + logger.info("creating %s", installer_name) if bitmap: - bitmapdata = open(bitmap, "rb").read() + with open(bitmap, "rb") as fp: + bitmapdata = fp.read() bitmaplen = len(bitmapdata) else: bitmaplen = 0 - file = open(installer_name, "wb") - file.write(self.get_exe_bytes()) - if bitmap: - file.write(bitmapdata) + with open(installer_name, "wb") as file: + file.write(self.get_exe_bytes()) + if bitmap: + file.write(bitmapdata) - # Convert cfgdata from unicode to ascii, mbcs encoded - try: - unicode - except NameError: - pass - else: + # Convert cfgdata from unicode to ascii, mbcs encoded if isinstance(cfgdata, unicode): cfgdata = cfgdata.encode("mbcs") - # Append the pre-install script - cfgdata = cfgdata + "\0" - if self.pre_install_script: - script_data = open(self.pre_install_script, "r").read() - cfgdata = cfgdata + script_data + "\n\0" - else: - # empty pre-install script + # Append the pre-install script cfgdata = 
cfgdata + "\0" - file.write(cfgdata) + if self.pre_install_script: + with open(self.pre_install_script) as fp: + script_data = fp.read() + cfgdata = cfgdata + script_data + "\n\0" + else: + # empty pre-install script + cfgdata = cfgdata + "\0" + file.write(cfgdata) - # The 'magic number' 0x1234567B is used to make sure that the - # binary layout of 'cfgdata' is what the wininst.exe binary - # expects. If the layout changes, increment that number, make - # the corresponding changes to the wininst.exe sources, and - # recompile them. - header = struct.pack("= logging.DEBUG + # Setup the CCompiler object that we'll use to do all the # compiling and linking - - # used to prevent the usage of an existing compiler for the - # compiler option when calling new_compiler() - # this will be removed in 3.3 and 2.8 - if not isinstance(self._compiler, str): - self._compiler = None - - self.compiler_obj = new_compiler(compiler=self._compiler, - verbose=self.verbose, + self.compiler_obj = new_compiler(compiler=self.compiler, + verbose=verbose, dry_run=self.dry_run, force=self.force) - # used to keep the compiler object reachable with - # "self.compiler". 
this will be removed in 3.3 and 2.8 - self._compiler = self.compiler_obj - customize_compiler(self.compiler_obj) # If we are cross-compiling, init the compiler now (if we are not # cross-compiling, init would not hurt, but people may rely on @@ -384,7 +324,7 @@ self.compiler_obj.set_include_dirs(self.include_dirs) if self.define is not None: # 'define' option is a list of (name,value) tuples - for (name, value) in self.define: + for name, value in self.define: self.compiler_obj.define_macro(name, value) if self.undef is not None: for macro in self.undef: @@ -423,19 +363,19 @@ for ext in self.extensions: try: self.build_extension(ext) - except (CCompilerError, DistutilsError, CompileError), e: + except (CCompilerError, PackagingError, CompileError): if not ext.optional: raise - self.warn('building extension "%s" failed: %s' % - (ext.name, e)) + logger.warning('%s: building extension %r failed: %s', + self.get_command_name(), ext.name, + sys.exc_info()[1]) def build_extension(self, ext): sources = ext.sources if sources is None or not isinstance(sources, (list, tuple)): - raise DistutilsSetupError, \ - ("in 'ext_modules' option (extension '%s'), " + + raise PackagingSetupError(("in 'ext_modules' option (extension '%s'), " + "'sources' must be present and must be " + - "a list of source filenames") % ext.name + "a list of source filenames") % ext.name) sources = list(sources) ext_path = self.get_ext_fullpath(ext.name) @@ -484,7 +424,7 @@ # The setup.py script for Python on Unix needs to be able to # get this list so it can perform all the clean up needed to # avoid keeping object files around when cleaning out a failed - # build of an extension module. Since Distutils does not + # build of an extension module. Since Packaging does not # track dependencies, we have to get rid of intermediates to # ensure all the intermediates will be properly re-built. # @@ -527,17 +467,13 @@ # source -- but there should be an option to put SWIG output in # the temp dir. 
- if self.swig_cpp: - logger.warn("--swig-cpp is deprecated - use --swig-opts=-c++") - - if self.swig_cpp or ('-c++' in self.swig_opts) or \ - ('-c++' in extension.swig_opts): + if ('-c++' in self.swig_opts or '-c++' in extension.swig_opts): target_ext = '.cpp' else: target_ext = '.c' for source in sources: - (base, ext) = os.path.splitext(source) + base, ext = os.path.splitext(source) if ext == ".i": # SWIG interface file new_sources.append(base + '_wrap' + target_ext) swig_sources.append(source) @@ -551,8 +487,6 @@ swig = self.swig or self.find_swig() swig_cmd = [swig, "-python"] swig_cmd.extend(self.swig_opts) - if self.swig_cpp: - swig_cmd.append("-c++") # Do not override commandline arguments if not self.swig_opts: @@ -591,9 +525,8 @@ return "swig.exe" else: - raise DistutilsPlatformError, \ - ("I don't know how to find (much less run) SWIG " - "on platform '%s'") % os.name + raise PackagingPlatformError(("I don't know how to find (much less run) SWIG " + "on platform '%s'") % os.name) # -- Name generators ----------------------------------------------- # (extension names, filenames, whatever) @@ -654,7 +587,7 @@ provided, "init" + module_name. Only relevant on Windows, where the .pyd file (DLL) must export the module "init" function. 
""" - initfunc_name = "init" + ext.name.split('.')[-1] + initfunc_name = "PyInit_" + ext.name.split('.')[-1] if initfunc_name not in ext.export_symbols: ext.export_symbols.append(initfunc_name) return ext.export_symbols @@ -723,9 +656,9 @@ else: if sysconfig.get_config_var('Py_ENABLE_SHARED'): - template = "python%d.%d" - pythonlib = (template % - (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + pythonlib = 'python{}.{}{}'.format( + sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff, + sys.abiflags) return ext.libraries + [pythonlib] else: return ext.libraries diff --git a/distutils2/command/build_py.py b/distutils2/command/build_py.py --- a/distutils2/command/build_py.py +++ b/distutils2/command/build_py.py @@ -1,15 +1,12 @@ -"""distutils.command.build_py - -Implements the Distutils 'build_py' command.""" - +"""Build pure Python modules (just copy to build directory).""" import os import sys -import logging from glob import glob +from distutils2 import logger from distutils2.command.cmd import Command -from distutils2.errors import DistutilsOptionError, DistutilsFileError +from distutils2.errors import PackagingOptionError, PackagingFileError from distutils2.util import convert_path from distutils2.compat import Mixin2to3 @@ -18,10 +15,10 @@ class build_py(Command, Mixin2to3): - description = "\"build\" pure Python modules (copy to build directory)" + description = "build pure Python modules (copy to build directory)" user_options = [ - ('build-lib=', 'd', "directory to \"build\" (copy) to"), + ('build-lib=', 'd', "directory to build (copy) to"), ('compile', 'c', "compile .py to .pyc"), ('no-compile', None, "don't compile .py files [default]"), ('optimize=', 'O', @@ -45,7 +42,7 @@ self.package = None self.package_data = None self.package_dir = None - self.compile = 0 + self.compile = False self.optimize = 0 self.force = None self._updated_files = [] @@ -77,7 +74,7 @@ self.optimize = int(self.optimize) assert 0 <= self.optimize <= 2 except (ValueError, 
AssertionError): - raise DistutilsOptionError("optimize must be 0, 1, or 2") + raise PackagingOptionError("optimize must be 0, 1, or 2") def run(self): # XXX copy_file by default preserves atime and mtime. IMHO this is @@ -111,7 +108,7 @@ self.run_2to3(self._updated_files, self._doctests_2to3, self.use_2to3_fixers) - self.byte_compile(self.get_outputs(include_bytecode=0)) + self.byte_compile(self.get_outputs(include_bytecode=False)) # -- Top-level worker functions ------------------------------------ @@ -154,7 +151,7 @@ # Each pattern has to be converted to a platform-specific path filelist = glob(os.path.join(src_dir, convert_path(pattern))) # Files that match more than one pattern are only added once - files.extend([fn for fn in filelist if fn not in files]) + files.extend(fn for fn in filelist if fn not in files) return files def build_package_data(self): @@ -179,6 +176,7 @@ """Return the directory, relative to the top of the source distribution, where package 'package' should be found (at least according to the 'package_dir' option, if any).""" + path = package.split('.') if self.package_dir is not None: path.insert(0, self.package_dir) @@ -197,10 +195,10 @@ # circumvent them. if package_dir != "": if not os.path.exists(package_dir): - raise DistutilsFileError( + raise PackagingFileError( "package directory '%s' does not exist" % package_dir) if not os.path.isdir(package_dir): - raise DistutilsFileError( + raise PackagingFileError( "supposed package directory '%s' exists, " "but is not a directory" % package_dir) @@ -210,8 +208,8 @@ if os.path.isfile(init_py): return init_py else: - logging.warning(("package init file '%s' not found " + - "(or not a regular file)"), init_py) + logger.warning(("package init file '%s' not found " + + "(or not a regular file)"), init_py) # Either not in a package at all (__init__.py not expected), or # __init__.py doesn't exist -- so don't return the filename. 
@@ -219,8 +217,8 @@ def check_module(self, module, module_file): if not os.path.isfile(module_file): - logging.warning("file %s (for module %s) not found", - module_file, module) + logger.warning("file %s (for module %s) not found", + module_file, module) return False else: return True @@ -240,7 +238,7 @@ module = os.path.splitext(os.path.basename(f))[0] modules.append((package, module, f)) else: - self.debug_print("excluding %s" % setup_script) + logger.debug("excluding %s", setup_script) return modules def find_modules(self): @@ -273,10 +271,10 @@ module_base = path[-1] try: - (package_dir, checked) = packages[package] + package_dir, checked = packages[package] except KeyError: package_dir = self.get_package_dir(package) - checked = 0 + checked = False if not checked: init_py = self.check_package(package, package_dir) @@ -323,10 +321,10 @@ outfile_path = [build_dir] + list(package) + [module + ".py"] return os.path.join(*outfile_path) - def get_outputs(self, include_bytecode=1): + def get_outputs(self, include_bytecode=True): modules = self.find_all_modules() outputs = [] - for (package, module, module_file) in modules: + for package, module, module_file in modules: package = package.split('.') filename = self.get_module_outfile(self.build_lib, package, module) outputs.append(filename) @@ -344,7 +342,7 @@ return outputs def build_module(self, module, module_file, package): - if isinstance(package, str): + if isinstance(package, basestring): package = package.split('.') elif not isinstance(package, (list, tuple)): raise TypeError( @@ -356,11 +354,11 @@ outfile = self.get_module_outfile(self.build_lib, package, module) dir = os.path.dirname(outfile) self.mkpath(dir) - return self.copy_file(module_file, outfile, preserve_mode=0) + return self.copy_file(module_file, outfile, preserve_mode=False) def build_modules(self): modules = self.find_modules() - for (package, module, module_file) in modules: + for package, module, module_file in modules: # Now "build" the 
module -- ie. copy the source file to # self.build_lib (the build directory for Python source). @@ -385,13 +383,14 @@ # Now loop over the modules we found, "building" each one (just # copy it to self.build_lib). - for (package_, module, module_file) in modules: + for package_, module, module_file in modules: assert package == package_ self.build_module(module, module_file, package) def byte_compile(self, files): if hasattr(sys, 'dont_write_bytecode') and sys.dont_write_bytecode: - self.warn('byte-compiling is disabled, skipping.') + logger.warning('%s: byte-compiling is disabled, skipping.', + self.get_command_name()) return from distutils2.util import byte_compile diff --git a/distutils2/command/build_scripts.py b/distutils2/command/build_scripts.py --- a/distutils2/command/build_scripts.py +++ b/distutils2/command/build_scripts.py @@ -1,26 +1,24 @@ -"""distutils.command.build_scripts +"""Build scripts (copy to build dir and fix up shebang line).""" -Implements the Distutils 'build_scripts' command.""" - - -import os, re -from stat import ST_MODE +import os +import re +import sysconfig from distutils2.command.cmd import Command -from distutils2.util import convert_path, newer +from distutils2.util import convert_path, newer, detect_encoding, fsencode from distutils2 import logger -from distutils2._backport import sysconfig from distutils2.compat import Mixin2to3 + # check if Python is called on the first line with this expression -first_line_re = re.compile('^#!.*python[0-9.]*([ \t].*)?$') +first_line_re = re.compile(b'^#!.*python[0-9.]*([ \t].*)?$') -class build_scripts (Command, Mixin2to3): +class build_scripts(Command, Mixin2to3): - description = "\"build\" scripts (copy and fixup #! 
line)" + description = "build scripts (copy and fix up shebang line)" user_options = [ - ('build-dir=', 'd', "directory to \"build\" (copy) to"), + ('build-dir=', 'd', "directory to build (copy) to"), ('force', 'f', "forcibly build everything (ignore file timestamps"), ('executable=', 'e', "specify final destination interpreter path"), ] @@ -28,7 +26,7 @@ boolean_options = ['force'] - def initialize_options (self): + def initialize_options(self): self.build_dir = None self.scripts = None self.force = None @@ -38,7 +36,7 @@ self.convert_2to3_doctests = None self.use_2to3_fixers = None - def finalize_options (self): + def finalize_options(self): self.set_undefined_options('build', ('build_scripts', 'build_dir'), 'use_2to3', 'use_2to3_fixers', @@ -49,14 +47,14 @@ def get_source_files(self): return self.scripts - def run (self): + def run(self): if not self.scripts: return copied_files = self.copy_scripts() - if self.use_2to3 and self.copied_files: - self._run_2to3(self.copied_files, fixers=self.use_2to3_fixers) + if self.use_2to3 and copied_files: + self._run_2to3(copied_files, fixers=self.use_2to3_fixers) - def copy_scripts (self): + def copy_scripts(self): """Copy each script listed in 'self.scripts'; if it's marked as a Python script in the Unix way (first line matches 'first_line_re', ie. starts with "\#!" and contains "python"), then adjust the first @@ -65,7 +63,7 @@ self.mkpath(self.build_dir) outfiles = [] for script in self.scripts: - adjust = 0 + adjust = False script = convert_path(script) outfile = os.path.join(self.build_dir, os.path.basename(script)) outfiles.append(outfile) @@ -78,40 +76,62 @@ # that way, we'll get accurate feedback if we can read the # script. 
try: - f = open(script, "r") + f = open(script, "rb") except IOError: if not self.dry_run: raise f = None else: + encoding, lines = detect_encoding(f.readline) + f.seek(0) first_line = f.readline() if not first_line: - self.warn("%s is an empty file (skipping)" % script) + logger.warning('%s: %s is an empty file (skipping)', + self.get_command_name(), script) continue match = first_line_re.match(first_line) if match: - adjust = 1 - post_interp = match.group(1) or '' + adjust = True + post_interp = match.group(1) or b'' if adjust: logger.info("copying and adjusting %s -> %s", script, self.build_dir) if not self.dry_run: - outf = open(outfile, "w") if not sysconfig.is_python_build(): - outf.write("#!%s%s\n" % - (self.executable, - post_interp)) + executable = self.executable else: - outf.write("#!%s%s\n" % - (os.path.join( + executable = os.path.join( sysconfig.get_config_var("BINDIR"), "python%s%s" % (sysconfig.get_config_var("VERSION"), - sysconfig.get_config_var("EXE"))), - post_interp)) - outf.writelines(f.readlines()) - outf.close() + sysconfig.get_config_var("EXE"))) + executable = fsencode(executable) + shebang = b"#!" + executable + post_interp + b"\n" + # Python parser starts to read a script using UTF-8 until + # it gets a #coding:xxx cookie. The shebang has to be the + # first line of a file, the #coding:xxx cookie cannot be + # written before. So the shebang has to be decodable from + # UTF-8. + try: + shebang.decode('utf-8') + except UnicodeDecodeError: + raise ValueError( + "The shebang ({!r}) is not decodable " + "from utf-8".format(shebang)) + # If the script is encoded to a custom encoding (use a + # #coding:xxx cookie), the shebang has to be decodable from + # the script encoding too. 
+ try: + shebang.decode(encoding) + except UnicodeDecodeError: + raise ValueError( + "The shebang ({!r}) is not decodable " + "from the script encoding ({})" + .format(shebang, encoding)) + with open(outfile, "wb") as outf: + outf.write(shebang) + outf.writelines(f.readlines()) if f: f.close() else: @@ -124,13 +144,10 @@ if self.dry_run: logger.info("changing mode of %s", file) else: - oldmode = os.stat(file)[ST_MODE] & 07777 - newmode = (oldmode | 0555) & 07777 + oldmode = os.stat(file).st_mode & 0o7777 + newmode = (oldmode | 0o555) & 0o7777 if newmode != oldmode: logger.info("changing mode of %s from %o to %o", file, oldmode, newmode) os.chmod(file, newmode) return outfiles - # copy_scripts () - -# class build_scripts diff --git a/distutils2/command/check.py b/distutils2/command/check.py --- a/distutils2/command/check.py +++ b/distutils2/command/check.py @@ -1,16 +1,14 @@ -"""distutils.command.check +"""Check PEP compliance of metadata.""" -Implements the Distutils 'check' command. -""" - +from distutils2 import logger from distutils2.command.cmd import Command -from distutils2.errors import DistutilsSetupError +from distutils2.errors import PackagingSetupError from distutils2.util import resolve_name class check(Command): - """This command checks the metadata of the package. 
- """ - description = ("perform some checks on the package") + + description = "check PEP compliance of metadata" + user_options = [('metadata', 'm', 'Verify metadata'), ('all', 'a', ('runs extended set of checks')), @@ -21,18 +19,20 @@ def initialize_options(self): """Sets default values for options.""" - self.all = 0 - self.metadata = 1 - self.strict = 0 + self.all = False + self.metadata = True + self.strict = False self._warnings = [] def finalize_options(self): pass - def warn(self, msg): - """Counts the number of warnings that occurs.""" - self._warnings.append(msg) - return Command.warn(self, msg) + def warn(self, msg, *args): + """Wrapper around logging that also remembers messages.""" + # XXX we could use a special handler for this, but would need to test + # if it works even if the logger has a too high level + self._warnings.append((msg, args)) + return logger.warning('%s: %s' % (self.get_command_name(), msg), *args) def run(self): """Runs the command.""" @@ -46,8 +46,8 @@ # let's raise an error in strict mode, if we have at least # one warning if self.strict and len(self._warnings) > 0: - msg = '\n'.join(self._warnings) - raise DistutilsSetupError(msg) + msg = '\n'.join(msg % args for msg, args in self._warnings) + raise PackagingSetupError(msg) def check_metadata(self): """Ensures that all required elements of metadata are supplied. 
@@ -58,7 +58,7 @@ """ missing, warnings = self.distribution.metadata.check(strict=True) if missing != []: - self.warn("missing required metadata: %s" % ', '.join(missing)) + self.warn('missing required metadata: %s', ', '.join(missing)) for warning in warnings: self.warn(warning) @@ -74,15 +74,15 @@ warning = '%s (line %s)' % (warning[1], line) self.warn(warning) elif self.strict: - raise DistutilsSetupError('The docutils package is needed.') + raise PackagingSetupError('The docutils package is needed.') def check_hooks_resolvable(self): - for options in self.distribution.command_options.itervalues(): + for options in self.distribution.command_options.values(): for hook_kind in ("pre_hook", "post_hook"): if hook_kind not in options: break - for hook_name in options[hook_kind][1].itervalues(): + for hook_name in options[hook_kind][1].values(): try: resolve_name(hook_name) except ImportError: - self.warn("Name '%s' cannot be resolved." % hook_name) + self.warn('name %r cannot be resolved', hook_name) diff --git a/distutils2/command/clean.py b/distutils2/command/clean.py --- a/distutils2/command/clean.py +++ b/distutils2/command/clean.py @@ -1,9 +1,6 @@ -"""distutils.command.clean +"""Clean up temporary files created by the build command.""" -Implements the Distutils 'clean' command.""" - -# contributed by Bastian Kleineidam , added 2000-03-18 - +# Contributed by Bastian Kleineidam import os from shutil import rmtree @@ -66,7 +63,7 @@ else: rmtree(directory) else: - logger.warn("'%s' does not exist -- can't clean it", + logger.warning("'%s' does not exist -- can't clean it", directory) # just for the heck of it, try to remove the base build directory: @@ -77,5 +74,3 @@ logger.info("removing '%s'", self.build_base) except OSError: pass - -# class clean diff --git a/distutils2/command/cmd.py b/distutils2/command/cmd.py --- a/distutils2/command/cmd.py +++ b/distutils2/command/cmd.py @@ -1,21 +1,16 @@ -"""distutils.cmd +"""Base class for commands.""" -Provides the Command 
class, the base class for the command classes -in the distutils.command package. -""" import os import re -import logging - -from distutils2.errors import DistutilsOptionError +from shutil import copyfile, move from distutils2 import util from distutils2 import logger -from distutils2._backport.shutil import copytree, copyfile, move, make_archive +from distutils2.errors import PackagingOptionError class Command(object): """Abstract base class for defining command classes, the "worker bees" - of the Distutils. A useful analogy for command classes is to think of + of the Packaging. A useful analogy for command classes is to think of them as subroutines with local variables called "options". The options are "declared" in 'initialize_options()' and "defined" (given their final values, aka "finalized") in 'finalize_options()', both of which @@ -62,7 +57,8 @@ from distutils2.dist import Distribution if not isinstance(dist, Distribution): - raise TypeError("dist must be a Distribution instance") + raise TypeError("dist must be an instance of Distribution, not %r" + % type(dist)) if self.__class__ is Command: raise RuntimeError("Command is an abstract class") @@ -70,7 +66,7 @@ self.initialize_options() # Per-command versions of the global flags, so that the user can - # customize Distutils' behaviour command-by-command and let some + # customize Packaging' behaviour command-by-command and let some # commands fall back on the Distribution's behaviour. None means # "not defined, check self.distribution's copy", while 0 or 1 mean # false and true (duh). Note that this means figuring out the real @@ -80,10 +76,6 @@ # "fix" it?] self._dry_run = None - # verbose is largely ignored, but needs to be set for - # backwards compatibility (I think)? - self.verbose = dist.verbose - # Some commands define a 'self.force' option to ignore file # timestamps, but methods defined *here* assume that # 'self.force' exists for all commands. 
So define it here @@ -92,13 +84,13 @@ # The 'help' flag is just used for command line parsing, so # none of that complicated bureaucracy is needed. - self.help = 0 + self.help = False # 'finalized' records whether or not 'finalize_options()' has been # called. 'finalize_options()' itself should not pay attention to # this flag: it is the business of 'ensure_finalized()', which # always calls 'finalize_options()', to respect/update it. - self.finalized = 0 + self.finalized = False # XXX A more explicit way to customize dry_run would be better. @property @@ -111,7 +103,7 @@ def ensure_finalized(self): if not self.finalized: self.finalize_options() - self.finalized = 1 + self.finalized = True # Subclasses must define: # initialize_options() @@ -156,18 +148,17 @@ def dump_options(self, header=None, indent=""): if header is None: header = "command options for '%s':" % self.get_command_name() - self.announce(indent + header, level=logging.INFO) + logger.info(indent + header) indent = indent + " " negative_opt = getattr(self, 'negative_opt', ()) - for (option, _, _) in self.user_options: + for option, _, _ in self.user_options: if option in negative_opt: continue option = option.replace('-', '_') if option[-1] == "=": option = option[:-1] value = getattr(self, option) - self.announce(indent + "%s = %s" % (option, value), - level=logging.INFO) + logger.info(indent + "%s = %s", option, value) def run(self): """A command's raison d'etre: carry out the action it exists to @@ -182,13 +173,6 @@ raise RuntimeError( "abstract method -- subclass %s must override" % self.__class__) - # TODO remove this method, just use logging.info - def announce(self, msg, level=logging.INFO): - """If the current verbosity level is of greater than or equal to - 'level' print 'msg' to stdout. - """ - logger.log(level, msg) - # -- External interface -------------------------------------------- # (called by outsiders) @@ -221,7 +205,7 @@ # value meets certain type and value constraints. 
If not, we try to # force it into conformance (eg. if we expect a list but have a string, # split the string on comma and/or whitespace). If we can't force the - # option into conformance, raise DistutilsOptionError. Thus, command + # option into conformance, raise PackagingOptionError. Thus, command # classes need do nothing more than (eg.) # self.ensure_string_list('foo') # and they can be guaranteed that thereafter, self.foo will be @@ -232,8 +216,8 @@ if val is None: setattr(self, option, default) return default - elif not isinstance(val, str): - raise DistutilsOptionError("'%s' must be a %s (got `%s`)" % + elif not isinstance(val, basestring): + raise PackagingOptionError("'%s' must be a %s (got `%s`)" % (option, what, val)) return val @@ -252,28 +236,28 @@ val = getattr(self, option) if val is None: return - elif isinstance(val, str): + elif isinstance(val, basestring): setattr(self, option, re.split(r',\s*|\s+', val)) else: if isinstance(val, list): # checks if all elements are str - ok = 1 + ok = True for element in val: - if not isinstance(element, str): - ok = 0 + if not isinstance(element, basestring): + ok = False break else: - ok = 0 + ok = False if not ok: - raise DistutilsOptionError( + raise PackagingOptionError( "'%s' must be a list of strings (got %r)" % (option, val)) def _ensure_tested_string(self, option, tester, what, error_fmt, default=None): val = self._ensure_stringlike(option, what, default) if val is not None and not tester(val): - raise DistutilsOptionError( + raise PackagingOptionError( ("error in '%s' option: " + error_fmt) % (option, val)) def ensure_filename(self, option): @@ -324,7 +308,7 @@ setattr(self, dst_option, getattr(src_cmd_obj, src_option)) - def get_finalized_command(self, command, create=1): + def get_finalized_command(self, command, create=True): """Wrapper around Distribution's 'get_command_obj()' method: find (create if necessary and 'create' is true) the command object for 'command', call its 'ensure_finalized()' 
method, and return the @@ -334,7 +318,7 @@ cmd_obj.ensure_finalized() return cmd_obj - def get_reinitialized_command(self, command, reinit_subcommands=0): + def get_reinitialized_command(self, command, reinit_subcommands=False): return self.distribution.get_reinitialized_command( command, reinit_subcommands) @@ -364,14 +348,10 @@ # -- External world manipulation ----------------------------------- - # TODO remove this method, just use logging.warn - def warn(self, msg): - logger.warning("warning: %s: %s\n", self.get_command_name(), msg) - def execute(self, func, args, msg=None, level=1): util.execute(func, args, msg, dry_run=self.dry_run) - def mkpath(self, name, mode=0777, dry_run=None, verbose=0): + def mkpath(self, name, mode=0o777, dry_run=None, verbose=0): if dry_run is None: dry_run = self.dry_run name = os.path.normpath(name) @@ -386,7 +366,7 @@ os.makedirs(name, mode) def copy_file(self, infile, outfile, - preserve_mode=1, preserve_times=1, link=None, level=1): + preserve_mode=True, preserve_times=True, link=None, level=1): """Copy a file respecting verbose, dry-run and force flags. (The former two default to whatever is in the Distribution object, and the latter defaults to false for commands that don't define it.)""" @@ -398,32 +378,34 @@ copyfile(infile, outfile) return outfile, None # XXX - def copy_tree(self, infile, outfile, - preserve_mode=1, preserve_times=1, preserve_symlinks=0, - level=1): + def copy_tree(self, infile, outfile, preserve_mode=True, + preserve_times=True, preserve_symlinks=False, level=1): """Copy an entire directory tree respecting verbose, dry-run, and force flags. 
""" if self.dry_run: return # see if we want to display something - return copytree(infile, outfile, preserve_symlinks) + + + return util.copy_tree(infile, outfile, preserve_mode, preserve_times, + preserve_symlinks, not self.force, dry_run=self.dry_run) def move_file(self, src, dst, level=1): - """Move a file respectin dry-run flag.""" + """Move a file respecting the dry-run flag.""" if self.dry_run: return # XXX log ? return move(src, dst) - def spawn(self, cmd, search_path=1, level=1): + def spawn(self, cmd, search_path=True, level=1): """Spawn an external command respecting dry-run flag.""" from distutils2.util import spawn spawn(cmd, search_path, dry_run=self.dry_run) def make_archive(self, base_name, format, root_dir=None, base_dir=None, owner=None, group=None): - return make_archive(base_name, format, root_dir, - base_dir, dry_run=self.dry_run, - owner=owner, group=group) + return util.make_archive(base_name, format, root_dir, + base_dir, dry_run=self.dry_run, + owner=owner, group=group) def make_file(self, infiles, outfile, func, args, exec_msg=None, skip_msg=None, level=1): @@ -439,7 +421,7 @@ skip_msg = "skipping %s (inputs unchanged)" % outfile # Allow 'infiles' to be a single string - if isinstance(infiles, str): + if isinstance(infiles, basestring): infiles = (infiles,) elif not isinstance(infiles, (list, tuple)): raise TypeError( diff --git a/distutils2/command/command_template b/distutils2/command/command_template --- a/distutils2/command/command_template +++ b/distutils2/command/command_template @@ -1,7 +1,7 @@ -"""Implementation of the 'x' command.""" +"""Do X and Y.""" -import logging -from distutils2.command.cmd import Command +from packaging import logger +from packaging.command.cmd import Command class x(Command): @@ -12,11 +12,10 @@ # List of option tuples: long name, short name (None if no short # name), and help string. 
user_options = [ - ('', '', # long option, short option (one letter) or None - ""), # help text + ('', '', # long option, short option (one letter) or None + ""), # help text ] - def initialize_options(self): self. = None self. = None @@ -28,7 +27,7 @@ def run(self): ... - logging.info(...) + logger.info(...) if not self.dry_run: ... diff --git a/distutils2/command/config.py b/distutils2/command/config.py --- a/distutils2/command/config.py +++ b/distutils2/command/config.py @@ -1,6 +1,6 @@ -"""distutils.command.config +"""Prepare the build. -Implements the Distutils 'config' command, a (mostly) empty command class +This module provides config, a (mostly) empty command class that exists mainly to be sub-classed by specific module distributions and applications. The idea is that while every "config" command is different, at least they're all named the same, and users always see "config" in the @@ -8,11 +8,12 @@ configure-like tasks: "try to compile this C code", or "figure out where this header file lives". 
""" + import os import re from distutils2.command.cmd import Command -from distutils2.errors import DistutilsExecError +from distutils2.errors import PackagingExecError from distutils2.compiler import customize_compiler from distutils2 import logger @@ -20,7 +21,7 @@ class config(Command): - description = "prepare to build" + description = "prepare the build" user_options = [ ('compiler=', None, @@ -56,8 +57,8 @@ self.library_dirs = None # maximal output for now - self.noisy = 1 - self.dump_source = 1 + self.noisy = True + self.dump_source = True # list of temporary files generated along-the-way that we have # to clean at some point @@ -66,17 +67,17 @@ def finalize_options(self): if self.include_dirs is None: self.include_dirs = self.distribution.include_dirs or [] - elif isinstance(self.include_dirs, str): + elif isinstance(self.include_dirs, basestring): self.include_dirs = self.include_dirs.split(os.pathsep) if self.libraries is None: self.libraries = [] - elif isinstance(self.libraries, str): + elif isinstance(self.libraries, basestring): self.libraries = [self.libraries] if self.library_dirs is None: self.library_dirs = [] - elif isinstance(self.library_dirs, str): + elif isinstance(self.library_dirs, basestring): self.library_dirs = self.library_dirs.split(os.pathsep) def run(self): @@ -97,7 +98,7 @@ from distutils2.compiler import new_compiler if not isinstance(self.compiler, CCompiler): self.compiler = new_compiler(compiler=self.compiler, - dry_run=self.dry_run, force=1) + dry_run=self.dry_run, force=True) customize_compiler(self.compiler) if self.include_dirs: self.compiler.set_include_dirs(self.include_dirs) @@ -109,36 +110,35 @@ def _gen_temp_sourcefile(self, body, headers, lang): filename = "_configtest" + LANG_EXT[lang] - file = open(filename, "w") - if headers: - for header in headers: - file.write("#include <%s>\n" % header) - file.write("\n") - file.write(body) - if body[-1] != "\n": - file.write("\n") - file.close() + with open(filename, "w") as 
file: + if headers: + for header in headers: + file.write("#include <%s>\n" % header) + file.write("\n") + file.write(body) + if body[-1] != "\n": + file.write("\n") return filename def _preprocess(self, body, headers, include_dirs, lang): src = self._gen_temp_sourcefile(body, headers, lang) out = "_configtest.i" - self.temp_files.extend([src, out]) + self.temp_files.extend((src, out)) self.compiler.preprocess(src, out, include_dirs=include_dirs) - return (src, out) + return src, out def _compile(self, body, headers, include_dirs, lang): src = self._gen_temp_sourcefile(body, headers, lang) if self.dump_source: dump_file(src, "compiling '%s':" % src) - (obj,) = self.compiler.object_filenames([src]) - self.temp_files.extend([src, obj]) + obj = self.compiler.object_filenames([src])[0] + self.temp_files.extend((src, obj)) self.compiler.compile([src], include_dirs=include_dirs) - return (src, obj) + return src, obj def _link(self, body, headers, include_dirs, libraries, library_dirs, lang): - (src, obj) = self._compile(body, headers, include_dirs, lang) + src, obj = self._compile(body, headers, include_dirs, lang) prog = os.path.splitext(os.path.basename(src))[0] self.compiler.link_executable([obj], prog, libraries=libraries, @@ -149,7 +149,7 @@ prog = prog + self.compiler.exe_extension self.temp_files.append(prog) - return (src, obj, prog) + return src, obj, prog def _clean(self, *filenames): if not filenames: @@ -182,11 +182,11 @@ """ from distutils2.compiler.ccompiler import CompileError self._check_compiler() - ok = 1 + ok = True try: self._preprocess(body, headers, include_dirs, lang) except CompileError: - ok = 0 + ok = False self._clean() return ok @@ -203,20 +203,19 @@ self._check_compiler() src, out = self._preprocess(body, headers, include_dirs, lang) - if isinstance(pattern, str): + if isinstance(pattern, basestring): pattern = re.compile(pattern) - file = open(out) - match = 0 - while 1: - line = file.readline() - if line == '': - break - if 
pattern.search(line): - match = 1 - break + with open(out) as file: + match = False + while True: + line = file.readline() + if line == '': + break + if pattern.search(line): + match = True + break - file.close() self._clean() return match @@ -228,9 +227,9 @@ self._check_compiler() try: self._compile(body, headers, include_dirs, lang) - ok = 1 + ok = True except CompileError: - ok = 0 + ok = False logger.info(ok and "success!" or "failure.") self._clean() @@ -247,9 +246,9 @@ try: self._link(body, headers, include_dirs, libraries, library_dirs, lang) - ok = 1 + ok = True except (CompileError, LinkError): - ok = 0 + ok = False logger.info(ok and "success!" or "failure.") self._clean() @@ -267,9 +266,9 @@ src, obj, exe = self._link(body, headers, include_dirs, libraries, library_dirs, lang) self.spawn([exe]) - ok = 1 - except (CompileError, LinkError, DistutilsExecError): - ok = 0 + ok = True + except (CompileError, LinkError, PackagingExecError): + ok = False logger.info(ok and "success!" or "failure.") self._clean() @@ -281,7 +280,7 @@ # when implementing a real-world config command!) def check_func(self, func, headers=None, include_dirs=None, - libraries=None, library_dirs=None, decl=0, call=0): + libraries=None, library_dirs=None, decl=False, call=False): """Determine if function 'func' is available by constructing a source file that refers to 'func', and compiles and links it. 
@@ -312,8 +311,6 @@ return self.try_link(body, headers, include_dirs, libraries, library_dirs) - # check_func () - def check_lib(self, library, library_dirs=None, headers=None, include_dirs=None, other_libraries=[]): """Determine if 'library' is available to be linked against, @@ -348,8 +345,5 @@ logger.info(filename) else: logger.info(head) - file = open(filename) - try: + with open(filename) as file: logger.info(file.read()) - finally: - file.close() diff --git a/distutils2/command/install_data.py b/distutils2/command/install_data.py --- a/distutils2/command/install_data.py +++ b/distutils2/command/install_data.py @@ -1,20 +1,18 @@ -"""distutils.command.install_data +"""Install platform-independent data files.""" -Implements the Distutils 'install_data' command, for installing -platform-independent data files.""" +# Contributed by Bastian Kleineidam -# contributed by Bastian Kleineidam +import os, sys +from shutil import Error +from sysconfig import get_paths, format_value +from distutils2 import logger +from distutils2.util import convert_path +from distutils2.command.cmd import Command -import os -from distutils2.command.cmd import Command -from distutils2.util import change_root, convert_path -from distutils2._backport.sysconfig import get_paths, format_value -from distutils2._backport.shutil import Error - class install_data(Command): - description = "install data files" + description = "install platform-independent data files" user_options = [ ('install-dir=', 'd', @@ -32,9 +30,9 @@ self.outfiles = [] self.data_files_out = [] self.root = None - self.force = 0 + self.force = False self.data_files = self.distribution.data_files - self.warn_dir = 1 + self.warn_dir = True def finalize_options(self): self.set_undefined_options('install_dist', @@ -43,19 +41,20 @@ def run(self): self.mkpath(self.install_dir) - for file in self.data_files.items(): - destination = convert_path(self.expand_categories(file[1])) + for _file in self.data_files.items(): + destination = 
convert_path(self.expand_categories(_file[1])) dir_dest = os.path.abspath(os.path.dirname(destination)) - + self.mkpath(dir_dest) try: - (out, _) = self.copy_file(file[0], dir_dest) - except Error, e: - self.warn(e) + out = self.copy_file(_file[0], dir_dest)[0] + except Error: + e = sys.exc_info()[1] + logger.warning('%s: %s', self.get_command_name(), e) out = destination self.outfiles.append(out) - self.data_files_out.append((file[0], destination)) + self.data_files_out.append((_file[0], destination)) def expand_categories(self, path_with_categories): local_vars = get_paths() @@ -63,15 +62,16 @@ expanded_path = format_value(path_with_categories, local_vars) expanded_path = format_value(expanded_path, local_vars) if '{' in expanded_path and '}' in expanded_path: - self.warn("Unable to expand %s, some categories may missing." % - path_with_categories) + logger.warning( + '%s: unable to expand %s, some categories may be missing', + self.get_command_name(), path_with_categories) return expanded_path def get_source_files(self): - return self.data_files.keys() + return list(self.data_files) def get_inputs(self): - return self.data_files.keys() + return list(self.data_files) def get_outputs(self): return self.outfiles diff --git a/distutils2/command/install_dist.py b/distutils2/command/install_dist.py --- a/distutils2/command/install_dist.py +++ b/distutils2/command/install_dist.py @@ -1,27 +1,20 @@ -"""distutils.command.install - -Implements the Distutils 'install_dist' command.""" - +"""Main install command, which calls the other install_* commands.""" import sys import os -from distutils2._backport import sysconfig -from distutils2._backport.sysconfig import (get_config_vars, get_paths, - get_path, get_config_var) +import sysconfig +from sysconfig import get_config_vars, get_paths, get_path, get_config_var from distutils2 import logger from distutils2.command.cmd import Command -from distutils2.errors import DistutilsPlatformError +from distutils2.errors import 
PackagingPlatformError from distutils2.util import write_file from distutils2.util import convert_path, change_root, get_platform -from distutils2.errors import DistutilsOptionError +from distutils2.errors import PackagingOptionError -# compatibility with 2.4 and 2.5 -if sys.version < '2.6': - HAS_USER_SITE = False -else: - HAS_USER_SITE = True + +HAS_USER_SITE = True class install_dist(Command): @@ -123,7 +116,7 @@ self.exec_prefix = None self.home = None if HAS_USER_SITE: - self.user = 0 + self.user = False # These select only the installation base; it's up to the user to # specify the installation scheme (currently, that means supplying @@ -158,7 +151,7 @@ # 'install_path_file' is always true unless some outsider meddles # with it. self.extra_path = None - self.install_path_file = 1 + self.install_path_file = True # 'force' forces installation, even if target files are not # out-of-date. 'skip_build' skips running the "build" command, @@ -166,9 +159,9 @@ # a user option, it's just there so the bdist_* commands can turn # it off) determines whether we warn about installing to a # directory not in sys.path. - self.force = 0 - self.skip_build = 0 - self.warn_dir = 1 + self.force = False + self.skip_build = False + self.warn_dir = True # These are only here as a conduit from the 'build' command to the # 'install_*' commands that do the real work. 
('build_base' isn't @@ -218,25 +211,27 @@ if ((self.prefix or self.exec_prefix or self.home) and (self.install_base or self.install_platbase)): - raise DistutilsOptionError( + raise PackagingOptionError( "must supply either prefix/exec-prefix/home or " "install-base/install-platbase -- not both") if self.home and (self.prefix or self.exec_prefix): - raise DistutilsOptionError( + raise PackagingOptionError( "must supply either home or prefix/exec-prefix -- not both") if HAS_USER_SITE and self.user and ( self.prefix or self.exec_prefix or self.home or self.install_base or self.install_platbase): - raise DistutilsOptionError( + raise PackagingOptionError( "can't combine user with prefix/exec_prefix/home or " "install_base/install_platbase") # Next, stuff that's wrong (or dubious) only on certain platforms. if os.name != "posix": if self.exec_prefix: - self.warn("exec-prefix option ignored on this platform") + logger.warning( + '%s: exec-prefix option ignored on this platform', + self.get_command_name()) self.exec_prefix = None # Now the interesting logic -- so interesting that we farm it out @@ -355,14 +350,14 @@ self.install_headers is None or self.install_scripts is None or self.install_data is None): - raise DistutilsOptionError( + raise PackagingOptionError( "install-base or install-platbase supplied, but " "installation scheme is incomplete") return if HAS_USER_SITE and self.user: if self.install_userbase is None: - raise DistutilsPlatformError( + raise PackagingPlatformError( "user base directory is not specified") self.install_base = self.install_platbase = self.install_userbase self.select_scheme("posix_user") @@ -372,7 +367,7 @@ else: if self.prefix is None: if self.exec_prefix is not None: - raise DistutilsOptionError( + raise PackagingOptionError( "must not supply exec-prefix without prefix") self.prefix = os.path.normpath(sys.prefix) @@ -390,7 +385,7 @@ """Finalize options for non-posix platforms""" if HAS_USER_SITE and self.user: if self.install_userbase 
is None: - raise DistutilsPlatformError( + raise PackagingPlatformError( "user base directory is not specified") self.install_base = self.install_platbase = self.install_userbase self.select_scheme(os.name + "_user") @@ -405,7 +400,7 @@ try: self.select_scheme(os.name) except KeyError: - raise DistutilsPlatformError( + raise PackagingPlatformError( "no support for installation on '%s'" % os.name) def dump_dirs(self, msg): @@ -428,7 +423,7 @@ """Set the install directories by applying the install schemes.""" # it's the caller's problem if they supply a bad name! scheme = get_paths(name, expand=False) - for key, value in scheme.iteritems(): + for key, value in scheme.items(): if key == 'platinclude': key = 'headers' value = os.path.join(value, self.distribution.metadata['Name']) @@ -469,7 +464,7 @@ self.extra_path = self.distribution.extra_path if self.extra_path is not None: - if isinstance(self.extra_path, str): + if isinstance(self.extra_path, basestring): self.extra_path = self.extra_path.split(',') if len(self.extra_path) == 1: @@ -477,7 +472,7 @@ elif len(self.extra_path) == 2: path_file, extra_dirs = self.extra_path else: - raise DistutilsOptionError( + raise PackagingOptionError( "'extra_path' option must be a list, tuple, or " "comma-separated string with 1 or 2 elements") @@ -504,9 +499,9 @@ if HAS_USER_SITE and not self.user: return home = convert_path(os.path.expanduser("~")) - for name, path in self.config_vars.iteritems(): + for name, path in self.config_vars.items(): if path.startswith(home) and not os.path.isdir(path): - os.makedirs(path, 0700) + os.makedirs(path, 0o700) # -- Command execution methods ------------------------------------- @@ -521,7 +516,7 @@ # internally, and not to sys.path, so we don't check the platform # matches what we are running. 
if self.warn_dir and build_plat != get_platform(): - raise DistutilsPlatformError("Can't install when " + raise PackagingPlatformError("Can't install when " "cross-compiling") # Run all sub-commands (at least those that need to be run) @@ -536,16 +531,16 @@ outputs = self.get_outputs() if self.root: # strip any package prefix root_len = len(self.root) - for counter in xrange(len(outputs)): + for counter in range(len(outputs)): outputs[counter] = outputs[counter][root_len:] self.execute(write_file, (self.record, outputs), "writing list of installed files to '%s'" % self.record) - sys_path = map(os.path.normpath, sys.path) - sys_path = map(os.path.normcase, sys_path) - install_lib = os.path.normcase(os.path.normpath(self.install_lib)) + normpath, normcase = os.path.normpath, os.path.normcase + sys_path = [normcase(normpath(p)) for p in sys.path] + install_lib = normcase(normpath(self.install_lib)) if (self.warn_dir and not (self.path_file and self.install_path_file) and install_lib not in sys_path): @@ -563,7 +558,8 @@ (filename, [self.extra_dirs]), "creating %s" % filename) else: - self.warn("path file '%s' not created" % filename) + logger.warning('%s: path file %r not created', + self.get_command_name(), filename) # -- Reporting methods --------------------------------------------- diff --git a/distutils2/command/install_distinfo.py b/distutils2/command/install_distinfo.py --- a/distutils2/command/install_distinfo.py +++ b/distutils2/command/install_distinfo.py @@ -1,27 +1,16 @@ -""" -distutils.command.install_distinfo -================================== +"""Create the PEP 376-compliant .dist-info directory.""" -:Author: Josip Djolonga +# Forked from the former install_egg_info command by Josip Djolonga -This module implements the ``install_distinfo`` command that creates the -``.dist-info`` directory for the distribution, as specified in :pep:`376`. 
-Usually, you do not have to call this command directly, it gets called -automatically by the ``install_dist`` command. -""" - -# This file was created from the code for the former command install_egg_info - +import codecs import csv -from distutils2 import logger -from distutils2._backport.shutil import rmtree -from distutils2.command.cmd import Command import os import re -try: - import hashlib -except ImportError: - from distutils2._backport import hashlib +import hashlib + +from distutils2.command.cmd import Command +from distutils2 import logger +from shutil import rmtree class install_distinfo(Command): @@ -72,13 +61,11 @@ if self.no_resources is None: self.no_resources = False - metadata = self.distribution.metadata basename = "%s-%s.dist-info" % ( - to_filename(safe_name(metadata['Name'])), - to_filename(safe_version(metadata['Version'])), - ) + to_filename(safe_name(metadata['Name'])), + to_filename(safe_version(metadata['Version']))) self.distinfo_dir = os.path.join(self.distinfo_dir, basename) self.outputs = [] @@ -105,18 +92,14 @@ installer_path = os.path.join(self.distinfo_dir, 'INSTALLER') logger.info('creating %s', installer_path) - f = open(installer_path, 'w') - try: + with open(installer_path, 'w') as f: f.write(self.installer) - finally: - f.close() self.outputs.append(installer_path) if self.requested: requested_path = os.path.join(self.distinfo_dir, 'REQUESTED') logger.info('creating %s', requested_path) - f = open(requested_path, 'w') - f.close() + open(requested_path, 'wb').close() self.outputs.append(requested_path) @@ -126,25 +109,21 @@ resources_path = os.path.join(self.distinfo_dir, 'RESOURCES') logger.info('creating %s', resources_path) - f = open(resources_path, 'wb') - try: + with open(resources_path, 'wb') as f: writer = csv.writer(f, delimiter=',', - lineterminator=os.linesep, + lineterminator='\n', quotechar='"') for tuple in install_data.get_resources_out(): writer.writerow(tuple) self.outputs.append(resources_path) - finally: - 
f.close() if not self.no_record: record_path = os.path.join(self.distinfo_dir, 'RECORD') logger.info('creating %s', record_path) - f = open(record_path, 'wb') - try: + with codecs.open(record_path, 'w', encoding='utf-8') as f: writer = csv.writer(f, delimiter=',', - lineterminator=os.linesep, + lineterminator='\n', quotechar='"') install = self.get_finalized_command('install_dist') @@ -155,18 +134,15 @@ writer.writerow((fpath, '', '')) else: size = os.path.getsize(fpath) - fd = open(fpath, 'r') - hash = hashlib.md5() - hash.update(fd.read()) + with open(fpath, 'rb') as fp: + hash = hashlib.md5() + hash.update(fp.read()) md5sum = hash.hexdigest() writer.writerow((fpath, md5sum, size)) # add the RECORD file itself writer.writerow((record_path, '', '')) self.outputs.append(record_path) - finally: - f.close() - def get_outputs(self): return self.outputs diff --git a/distutils2/command/install_headers.py b/distutils2/command/install_headers.py --- a/distutils2/command/install_headers.py +++ b/distutils2/command/install_headers.py @@ -1,8 +1,4 @@ -"""distutils.command.install_headers - -Implements the Distutils 'install_headers' command, to install C/C++ header -files to the Python include directory.""" - +"""Install C/C++ header files to the Python include directory.""" from distutils2.command.cmd import Command @@ -22,7 +18,7 @@ def initialize_options(self): self.install_dir = None - self.force = 0 + self.force = False self.outfiles = [] def finalize_options(self): @@ -37,7 +33,7 @@ self.mkpath(self.install_dir) for header in headers: - (out, _) = self.copy_file(header, self.install_dir) + out = self.copy_file(header, self.install_dir)[0] self.outfiles.append(out) def get_inputs(self): @@ -45,5 +41,3 @@ def get_outputs(self): return self.outfiles - -# class install_headers diff --git a/distutils2/command/install_lib.py b/distutils2/command/install_lib.py --- a/distutils2/command/install_lib.py +++ b/distutils2/command/install_lib.py @@ -1,14 +1,12 @@ 
-"""distutils.command.install_lib - -Implements the Distutils 'install_lib' command -(install all Python modules).""" - +"""Install all modules (extensions and pure Python).""" import os import sys +import logging +from distutils2 import logger from distutils2.command.cmd import Command -from distutils2.errors import DistutilsOptionError +from distutils2.errors import PackagingOptionError # Extension for Python source files. @@ -19,7 +17,7 @@ class install_lib(Command): - description = "install all Python modules (extensions and pure Python)" + description = "install all modules (extensions and pure Python)" # The byte-compilation options are a tad confusing. Here are the # possible scenarios: @@ -55,7 +53,7 @@ # let the 'install_dist' command dictate our installation directory self.install_dir = None self.build_dir = None - self.force = 0 + self.force = False self.compile = None self.optimize = None self.skip_build = None @@ -70,7 +68,7 @@ 'force', 'compile', 'optimize', 'skip_build') if self.compile is None: - self.compile = 1 + self.compile = True if self.optimize is None: self.optimize = 0 @@ -80,7 +78,7 @@ if self.optimize not in (0, 1, 2): raise AssertionError except (ValueError, AssertionError): - raise DistutilsOptionError, "optimize must be 0, 1, or 2" + raise PackagingOptionError("optimize must be 0, 1, or 2") def run(self): # Make sure we have built everything we need first @@ -109,14 +107,19 @@ if os.path.isdir(self.build_dir): outfiles = self.copy_tree(self.build_dir, self.install_dir) else: - self.warn("'%s' does not exist -- no Python modules to install" % - self.build_dir) + logger.warning( + '%s: %r does not exist -- no Python modules to install', + self.get_command_name(), self.build_dir) return return outfiles def byte_compile(self, files): - if hasattr(sys, 'dont_write_bytecode') and sys.dont_write_bytecode: - self.warn('byte-compiling is disabled, skipping.') + if getattr(sys, 'dont_write_bytecode'): + # XXX do we want this? 
because a Python runs without bytecode + # doesn't mean that the *dists should not contain bytecode + #--or does it? + logger.warning('%s: byte-compiling is disabled, skipping.', + self.get_command_name()) return from distutils2.util import byte_compile @@ -127,6 +130,10 @@ # should at least generate usable bytecode in RPM distributions. install_root = self.get_finalized_command('install_dist').root + # Temporary kludge until we remove the verbose arguments and use + # logging everywhere + verbose = logger.getEffectiveLevel() >= logging.DEBUG + if self.compile: byte_compile(files, optimize=0, force=self.force, prefix=install_root, @@ -134,7 +141,8 @@ if self.optimize > 0: byte_compile(files, optimize=self.optimize, force=self.force, prefix=install_root, - verbose=self.verbose, dry_run=self.dry_run) + verbose=verbose, + dry_run=self.dry_run) # -- Utility methods ----------------------------------------------- diff --git a/distutils2/command/install_scripts.py b/distutils2/command/install_scripts.py --- a/distutils2/command/install_scripts.py +++ b/distutils2/command/install_scripts.py @@ -1,17 +1,12 @@ -"""distutils.command.install_scripts +"""Install scripts.""" -Implements the Distutils 'install_scripts' command, for installing -Python scripts.""" - -# contributed by Bastian Kleineidam - +# Contributed by Bastian Kleineidam import os from distutils2.command.cmd import Command from distutils2 import logger -from stat import ST_MODE -class install_scripts (Command): +class install_scripts(Command): description = "install scripts (Python or otherwise)" @@ -25,19 +20,19 @@ boolean_options = ['force', 'skip-build'] - def initialize_options (self): + def initialize_options(self): self.install_dir = None - self.force = 0 + self.force = False self.build_dir = None self.skip_build = None - def finalize_options (self): + def finalize_options(self): self.set_undefined_options('build', ('build_scripts', 'build_dir')) self.set_undefined_options('install_dist', 
('install_scripts', 'install_dir'), 'force', 'skip_build') - def run (self): + def run(self): if not self.skip_build: self.run_command('build_scripts') @@ -53,11 +48,11 @@ if self.dry_run: logger.info("changing mode of %s", file) else: - mode = ((os.stat(file)[ST_MODE]) | 0555) & 07777 + mode = (os.stat(file).st_mode | 0o555) & 0o7777 logger.info("changing mode of %s to %o", file, mode) os.chmod(file, mode) - def get_inputs (self): + def get_inputs(self): return self.distribution.scripts or [] def get_outputs(self): diff --git a/distutils2/command/register.py b/distutils2/command/register.py --- a/distutils2/command/register.py +++ b/distutils2/command/register.py @@ -1,26 +1,20 @@ -"""distutils.command.register +"""Register a release with a project index.""" -Implements the Distutils 'register' command (register with the repository). -""" +# Contributed by Richard Jones -# created 2002/10/21, Richard Jones - - -import urllib2 +import sys import getpass import urlparse -import StringIO -import logging +import urllib2 +from distutils2 import logger +from distutils2.util import (read_pypirc, generate_pypirc, DEFAULT_REPOSITORY, + DEFAULT_REALM, get_pypirc_path, encode_multipart) from distutils2.command.cmd import Command -from distutils2 import logger -from distutils2.metadata import metadata_to_dict -from distutils2.util import (read_pypirc, generate_pypirc, DEFAULT_REPOSITORY, - DEFAULT_REALM, get_pypirc_path) class register(Command): - description = "register the distribution with the Python package index" + description = "register a release with PyPI" user_options = [ ('repository=', 'r', "repository URL [default: %s]" % DEFAULT_REPOSITORY), @@ -37,9 +31,9 @@ def initialize_options(self): self.repository = None self.realm = None - self.show_response = 0 - self.list_classifiers = 0 - self.strict = 0 + self.show_response = False + self.list_classifiers = False + self.strict = False def finalize_options(self): if self.repository is None: @@ -48,14 +42,17 @@ 
self.realm = DEFAULT_REALM def run(self): - self.finalize_options() self._set_config() # Check the package metadata check = self.distribution.get_command_obj('check') - check.strict = self.strict - check.all = 1 - self.run_command('check') + if check.strict != self.strict and not check.all: + # If check was already run but with different options, + # re-run it + check.strict = self.strict + check.all = True + self.distribution.have_run.pop('check', None) + self.run_command('check') if self.dry_run: self.verify_metadata() @@ -123,6 +120,9 @@ 3. set the password to a random string and email the user. ''' + # TODO factor registration out into another method + # TODO use print to print, not logging + # see if we can short-cut and get the username/password from the # config if self.has_config: @@ -136,24 +136,24 @@ # get the user's login info choices = '1 2 3 4'.split() while choice not in choices: - self.announce('''\ + logger.info('''\ We need to know who you are, so please choose either: 1. use your existing login, 2. register as a new user, 3. have the server generate a new password for you (and email it to you), or 4. quit -Your selection [default 1]: ''', logging.INFO) +Your selection [default 1]: ''') - choice = raw_input() + choice = input() if not choice: choice = '1' elif choice not in choices: - print 'Please choose one of the four options!' 
+ print('Please choose one of the four options!') if choice == '1': # get the username and password while not username: - username = raw_input('Username: ') + username = input('Username: ') while not password: password = getpass.getpass('Password: ') @@ -164,8 +164,7 @@ # send the info to the server and report the result code, result = self.post_to_server(self.build_post_data('submit'), auth) - self.announce('Server response (%s): %s' % (code, result), - logging.INFO) + logger.info('Server response (%s): %s', code, result) # possibly save the login if code == 200: @@ -174,14 +173,13 @@ # so the upload command can reuse it self.distribution.password = password else: - self.announce(('I can store your PyPI login so future ' - 'submissions will be faster.'), - logging.INFO) - self.announce('(the login will be stored in %s)' % \ - get_pypirc_path(), logging.INFO) + logger.info( + 'I can store your PyPI login so future submissions ' + 'will be faster.\n(the login will be stored in %s)', + get_pypirc_path()) choice = 'X' while choice.lower() not in 'yn': - choice = raw_input('Save your login (y/N)?') + choice = input('Save your login (y/N)?') if not choice: choice = 'n' if choice.lower() == 'y': @@ -192,7 +190,7 @@ data['name'] = data['password'] = data['email'] = '' data['confirm'] = None while not data['name']: - data['name'] = raw_input('Username: ') + data['name'] = input('Username: ') while data['password'] != data['confirm']: while not data['password']: data['password'] = getpass.getpass('Password: ') @@ -201,9 +199,9 @@ if data['password'] != data['confirm']: data['password'] = '' data['confirm'] = None - print "Password and confirm don't match!" 
+ print("Password and confirm don't match!") while not data['email']: - data['email'] = raw_input(' EMail: ') + data['email'] = input(' EMail: ') code, result = self.post_to_server(data) if code != 200: logger.info('server response (%s): %s', code, result) @@ -214,14 +212,14 @@ data = {':action': 'password_reset'} data['email'] = '' while not data['email']: - data['email'] = raw_input('Your email address: ') + data['email'] = input('Your email address: ') code, result = self.post_to_server(data) logger.info('server response (%s): %s', code, result) def build_post_data(self, action): # figure the data to send - the metadata plus some additional # information used by the package server - data = metadata_to_dict(self.distribution.metadata) + data = self.distribution.metadata.todict() data[':action'] = action return data @@ -230,33 +228,13 @@ ''' Post a query to the server, and return a string response. ''' if 'name' in data: - self.announce('Registering %s to %s' % (data['name'], - self.repository), - logging.INFO) + logger.info('Registering %s to %s', data['name'], self.repository) # Build up the MIME payload for the urllib2 POST data - boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' - sep_boundary = '\n--' + boundary - end_boundary = sep_boundary + '--' - body = StringIO.StringIO() - for key, value in data.iteritems(): - # handle multiple entries for the same name - if not isinstance(value, (tuple, list)): - value = [value] - - for value in value: - body.write(sep_boundary) - body.write('\nContent-Disposition: form-data; name="%s"'%key) - body.write("\n\n") - body.write(value) - if value and value[-1] == '\r': - body.write('\n') # write an extra newline (lurve Macs) - body.write(end_boundary) - body.write("\n") - body = body.getvalue() + content_type, body = encode_multipart(data.items(), []) # build the Request headers = { - 'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'%boundary, + 'Content-type': content_type, 'Content-length': 
str(len(body)) } req = urllib2.Request(self.repository, body, headers) @@ -268,18 +246,19 @@ data = '' try: result = opener.open(req) - except urllib2.HTTPError, e: + except urllib2.HTTPError: + e = sys.exc_info()[1] if self.show_response: data = e.fp.read() result = e.code, e.msg - except urllib2.URLError, e: - result = 500, str(e) + except urllib2.URLError: + result = 500, str(sys.exc_info()[1]) else: if self.show_response: data = result.read() result = 200, 'OK' if self.show_response: dashes = '-' * 75 - self.announce('%s%s%s' % (dashes, data, dashes)) + logger.info('%s%s%s', dashes, data, dashes) return result diff --git a/distutils2/command/sdist.py b/distutils2/command/sdist.py --- a/distutils2/command/sdist.py +++ b/distutils2/command/sdist.py @@ -1,34 +1,27 @@ -"""distutils.command.sdist +"""Create a source distribution.""" -Implements the Distutils 'sdist' command (create a source distribution).""" import os +import re import sys +from StringIO import StringIO from shutil import rmtree -import re -from StringIO import StringIO -try: - from shutil import get_archive_formats -except ImportError: - from distutils2._backport.shutil import get_archive_formats - +from distutils2 import logger +from distutils2.util import resolve_name, get_archive_formats +from distutils2.errors import (PackagingPlatformError, PackagingOptionError, + PackagingModuleError, PackagingFileError) from distutils2.command import get_command_names from distutils2.command.cmd import Command -from distutils2.errors import (DistutilsPlatformError, DistutilsOptionError, - DistutilsModuleError, DistutilsFileError) from distutils2.manifest import Manifest -from distutils2 import logger -from distutils2.util import resolve_name + def show_formats(): """Print all possible values for the 'formats' option (used by the "--help-formats" command-line option). 
""" from distutils2.fancy_getopt import FancyGetopt - formats = [] - for name, desc in get_archive_formats(): - formats.append(("formats=" + name, None, desc)) - formats.sort() + formats = sorted(('formats=' + name, None, desc) + for name, desc in get_archive_formats()) FancyGetopt(formats).print_help( "List of available source distribution formats:") @@ -36,6 +29,7 @@ _COLLAPSE_PATTERN = re.compile('\\\w\n', re.M) _COMMENTED_LINE = re.compile('^#.*\n$|^\w*\n$', re.M) + class sdist(Command): description = "create a source distribution (tarball, zip file, etc.)" @@ -84,26 +78,24 @@ ] negative_opt = {'no-defaults': 'use-defaults', - 'no-prune': 'prune' } + 'no-prune': 'prune'} default_format = {'posix': 'gztar', - 'nt': 'zip' } - + 'nt': 'zip'} def initialize_options(self): self.manifest = None - # 'use_defaults': if true, we will include the default file set # in the manifest - self.use_defaults = 1 - self.prune = 1 - self.manifest_only = 0 + self.use_defaults = True + self.prune = True + self.manifest_only = False self.formats = None - self.keep_temp = 0 + self.keep_temp = False self.dist_dir = None self.archive_files = None - self.metadata_check = 1 + self.metadata_check = True self.owner = None self.group = None self.filelist = None @@ -125,14 +117,13 @@ try: self.formats = [self.default_format[os.name]] except KeyError: - raise DistutilsPlatformError, \ - "don't know how to create source distributions " + \ - "on platform %s" % os.name + raise PackagingPlatformError("don't know how to create source " + "distributions on platform %s" % os.name) bad_format = self._check_archive_formats(self.formats) if bad_format: - raise DistutilsOptionError, \ - "unknown archive format '%s'" % bad_format + raise PackagingOptionError("unknown archive format '%s'" \ + % bad_format) if self.dist_dir is None: self.dist_dir = "dist" @@ -143,7 +134,7 @@ if self.manifest_builders is None: self.manifest_builders = [] else: - if isinstance(self.manifest_builders, str): + if 
isinstance(self.manifest_builders, basestring): self.manifest_builders = self.manifest_builders.split(',') builders = [] for builder in self.manifest_builders: @@ -152,14 +143,13 @@ continue try: builder = resolve_name(builder) - except ImportError, e: - raise DistutilsModuleError(e) + except ImportError: + raise PackagingModuleError(sys.exc_info()[1]) builders.append(builder) self.manifest_builders = builders - def run(self): # 'filelist' contains the list of files that will make up the # manifest @@ -191,7 +181,8 @@ """ template_exists = len(self.distribution.extra_files) > 0 if not template_exists: - self.warn('Using default file list') + logger.warning('%s: using default file list', + self.get_command_name()) self.filelist.findall() if self.use_defaults: @@ -210,20 +201,24 @@ self.filelist.write(self.manifest) def add_defaults(self): - """Add all the default files to self.filelist: - - all pure Python modules mentioned in setup script - - all files pointed by package_data (build_py) - - all files defined in data_files. - - all files defined as scripts. - - all C sources listed as part of extensions or C libraries - in the setup script (doesn't catch C headers!) - Warns if (README or README.txt) or setup.py are missing; everything - else is optional. + """Add all default files to self.filelist. + + In addition to the setup.cfg file, this will include all files returned + by the get_source_files of every registered command. This will find + Python modules and packages, data files listed in package_data_, + data_files and extra_files, scripts, C sources of extension modules or + C libraries (headers are missing). 
""" + if os.path.exists('setup.cfg'): + self.filelist.append('setup.cfg') + else: + logger.warning("%s: standard 'setup.cfg' file not found", + self.get_command_name()) + for cmd_name in get_command_names(): try: cmd_obj = self.get_finalized_command(cmd_name) - except DistutilsOptionError: + except PackagingOptionError: pass else: self.filelist.extend(cmd_obj.get_source_files()) @@ -252,8 +247,7 @@ vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', '_darcs'] vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps) - self.filelist.exclude_pattern(vcs_ptrn, is_regex=1) - + self.filelist.exclude_pattern(vcs_ptrn, is_regex=True) def make_release_tree(self, base_dir, files): """Create the directory tree that will become the source @@ -285,18 +279,19 @@ msg = "copying files to %s..." % base_dir if not files: - logger.warn("no files to distribute -- empty manifest?") + logger.warning("no files to distribute -- empty manifest?") else: logger.info(msg) for file in self.distribution.metadata.requires_files: if file not in files: - msg = "'%s' must be included explicitly in 'extra_files'" % file - raise DistutilsFileError(msg) + msg = "'%s' must be included explicitly in 'extra_files'" \ + % file + raise PackagingFileError(msg) for file in files: if not os.path.isfile(file): - logger.warn("'%s' not a regular file -- skipping", file) + logger.warning("'%s' not a regular file -- skipping", file) else: dest = os.path.join(base_dir, file) self.copy_file(file, dest, link=link) @@ -342,12 +337,12 @@ """ return self.archive_files - def create_tree(self, base_dir, files, mode=0777, verbose=1, dry_run=0): - need_dir = {} + def create_tree(self, base_dir, files, mode=0o777, verbose=1, + dry_run=False): + need_dir = set() for file in files: - need_dir[os.path.join(base_dir, os.path.dirname(file))] = 1 - need_dirs = sorted(need_dir) + need_dir.add(os.path.join(base_dir, os.path.dirname(file))) # Now create them - for dir in need_dirs: + for dir in sorted(need_dir): 
self.mkpath(dir, mode, verbose=verbose, dry_run=dry_run) diff --git a/distutils2/command/test.py b/distutils2/command/test.py --- a/distutils2/command/test.py +++ b/distutils2/command/test.py @@ -1,20 +1,20 @@ +"""Run the project's test suite.""" + import os import sys +import logging import unittest +from distutils2 import logger from distutils2.command.cmd import Command -from distutils2.errors import DistutilsOptionError +from distutils2.database import get_distribution +from distutils2.errors import PackagingOptionError from distutils2.util import resolve_name -try: - from pkgutil import get_distribution -except ImportError: - from distutils2._backport.pkgutil import get_distribution - class test(Command): - description = "run the distribution's test suite" + description = "run the project's test suite" user_options = [ ('suite=', 's', @@ -34,11 +34,11 @@ self.build_lib = self.get_finalized_command("build").build_lib for requirement in self.tests_require: if get_distribution(requirement) is None: - self.announce("test dependency %s is not installed, " - "tests may fail" % requirement) + logger.warning("test dependency %s is not installed, " + "tests may fail", requirement) if (not self.suite and not self.runner and self.get_ut_with_discovery() is None): - raise DistutilsOptionError( + raise PackagingOptionError( "no test discovery available, please give a 'suite' or " "'runner' option or install unittest2") @@ -60,16 +60,22 @@ self.run_command('build') sys.path.insert(0, build.build_lib) + # Temporary kludge until we remove the verbose arguments and use + # logging everywhere + logger = logging.getLogger('distutils2') + verbose = logger.getEffectiveLevel() >= logging.DEBUG + verbosity = verbose + 1 + # run the tests if self.runner: resolve_name(self.runner)() elif self.suite: - runner = unittest.TextTestRunner(verbosity=self.verbose + 1) + runner = unittest.TextTestRunner(verbosity=verbosity) runner.run(resolve_name(self.suite)()) elif 
self.get_ut_with_discovery(): ut = self.get_ut_with_discovery() test_suite = ut.TestLoader().discover(os.curdir) - runner = ut.TextTestRunner(verbosity=self.verbose + 1) + runner = ut.TextTestRunner(verbosity=verbosity) runner.run(test_suite) finally: sys.path[:] = prev_syspath diff --git a/distutils2/command/upload.py b/distutils2/command/upload.py --- a/distutils2/command/upload.py +++ b/distutils2/command/upload.py @@ -1,27 +1,21 @@ -"""distutils.command.upload +"""Upload a distribution to a project index.""" -Implements the Distutils 'upload' subcommand (upload package to PyPI).""" -import os +import os, sys import socket +import logging import platform -import logging -from urllib2 import urlopen, Request, HTTPError +import urlparse +from io import BytesIO from base64 import standard_b64encode -import urlparse -try: - from cStringIO import StringIO -except ImportError: - from StringIO import StringIO -try: - from hashlib import md5 -except ImportError: - from distutils2._backport.hashlib import md5 +from hashlib import md5 +from urllib2 import HTTPError +from urllib2 import urlopen, Request -from distutils2.errors import DistutilsOptionError -from distutils2.util import spawn +from distutils2 import logger +from distutils2.errors import PackagingOptionError +from distutils2.util import (spawn, read_pypirc, DEFAULT_REPOSITORY, + DEFAULT_REALM, encode_multipart) from distutils2.command.cmd import Command -from distutils2.metadata import metadata_to_dict -from distutils2.util import read_pypirc, DEFAULT_REPOSITORY, DEFAULT_REALM class upload(Command): @@ -46,10 +40,10 @@ def initialize_options(self): self.repository = None self.realm = None - self.show_response = 0 + self.show_response = False self.username = '' self.password = '' - self.show_response = 0 + self.show_response = False self.sign = False self.identity = None self.upload_docs = False @@ -60,9 +54,8 @@ if self.realm is None: self.realm = DEFAULT_REALM if self.identity and not self.sign: - raise 
DistutilsOptionError( - "Must use --sign for --identity to have meaning" - ) + raise PackagingOptionError( + "Must use --sign for --identity to have meaning") config = read_pypirc(self.repository, self.realm) if config != {}: self.username = config['username'] @@ -77,7 +70,8 @@ def run(self): if not self.distribution.dist_files: - raise DistutilsOptionError("No dist file created in earlier command") + raise PackagingOptionError( + "No dist file created in earlier command") for command, pyversion, filename in self.distribution.dist_files: self.upload_file(command, pyversion, filename) if self.upload_docs: @@ -90,13 +84,13 @@ # XXX to be refactored with register.post_to_server def upload_file(self, command, pyversion, filename): # Makes sure the repository URL is compliant - schema, netloc, url, params, query, fragments = \ + scheme, netloc, url, params, query, fragments = \ urlparse.urlparse(self.repository) if params or query or fragments: raise AssertionError("Incompatible url %s" % self.repository) - if schema not in ('http', 'https'): - raise AssertionError("unsupported schema " + schema) + if scheme not in ('http', 'https'): + raise AssertionError("unsupported scheme " + scheme) # Sign if requested if self.sign: @@ -108,99 +102,69 @@ # Fill in the data - send all the metadata in case we need to # register a new release - content = open(filename,'rb').read() + with open(filename, 'rb') as f: + content = f.read() - data = metadata_to_dict(self.distribution.metadata) + data = self.distribution.metadata.todict() # extra upload infos data[':action'] = 'file_upload' data['protcol_version'] = '1' - data['content'] = [os.path.basename(filename), content] + data['content'] = (os.path.basename(filename), content) data['filetype'] = command data['pyversion'] = pyversion data['md5_digest'] = md5(content).hexdigest() - comment = '' if command == 'bdist_dumb': - comment = 'built for %s' % platform.platform(terse=1) - data['comment'] = comment + data['comment'] = 'built for 
%s' % platform.platform(terse=True) if self.sign: - data['gpg_signature'] = [(os.path.basename(filename) + ".asc", - open(filename+".asc").read())] + with open(filename + '.asc') as fp: + sig = fp.read() + data['gpg_signature'] = [ + (os.path.basename(filename) + ".asc", sig)] # set up the authentication - auth = "Basic " + standard_b64encode(self.username + ":" + - self.password) + # The exact encoding of the authentication string is debated. + # Anyway PyPI only accepts ascii for both username or password. + user_pass = (self.username + ":" + self.password).encode('ascii') + auth = b"Basic " + standard_b64encode(user_pass) # Build up the MIME payload for the POST data - boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' - sep_boundary = '\n--' + boundary - end_boundary = sep_boundary + '--' - body = StringIO() - file_fields = ('content', 'gpg_signature') + files = [] + for key in ('content', 'gpg_signature'): + if key in data: + filename_, value = data.pop(key) + files.append((key, filename_, value)) - for key, values in data.iteritems(): - # handle multiple entries for the same name - if not isinstance(values, (tuple, list)): - values = [values] + content_type, body = encode_multipart(data.items(), files) - content_dispo = 'Content-Disposition: form-data; name="%s"' % key - - if key in file_fields: - filename_, content = values - filename_ = ';filename="%s"' % filename_ - body.write(sep_boundary) - body.write("\n") - body.write(content_dispo) - body.write(filename_) - body.write("\n\n") - body.write(content) - else: - for value in values: - body.write(sep_boundary) - body.write("\n") - body.write(content_dispo) - body.write("\n\n") - body.write(value) - if value and value[-1] == '\r': - # write an extra newline (lurve Macs) - body.write('\n') - - body.write(end_boundary) - body.write("\n") - body = body.getvalue() - - self.announce("Submitting %s to %s" % (filename, self.repository), - logging.INFO) + logger.info("Submitting %s to %s", filename, 
self.repository) # build the Request - headers = {'Content-type': - 'multipart/form-data; boundary=%s' % boundary, + headers = {'Content-type': content_type, 'Content-length': str(len(body)), 'Authorization': auth} - request = Request(self.repository, data=body, - headers=headers) + request = Request(self.repository, body, headers) # send the data try: result = urlopen(request) status = result.code reason = result.msg - except socket.error, e: - self.announce(str(e), logging.ERROR) + except socket.error: + logger.error(sys.exc_info()[1]) return - except HTTPError, e: + except HTTPError: + e = sys.exc_info()[1] status = e.code reason = e.msg if status == 200: - self.announce('Server response (%s): %s' % (status, reason), - logging.INFO) + logger.info('Server response (%s): %s', status, reason) else: - self.announce('Upload failed (%s): %s' % (status, reason), - logging.ERROR) + logger.error('Upload failed (%s): %s', status, reason) - if self.show_response: - msg = '\n'.join(('-' * 75, result.read(), '-' * 75)) - self.announce(msg, logging.INFO) + if self.show_response and logger.isEnabledFor(logging.INFO): + sep = '-' * 75 + logger.info('%s\n%s\n%s', sep, result.read().decode(), sep) diff --git a/distutils2/command/upload_docs.py b/distutils2/command/upload_docs.py --- a/distutils2/command/upload_docs.py +++ b/distutils2/command/upload_docs.py @@ -1,65 +1,38 @@ -import os +"""Upload HTML documentation to a project index.""" + +import os, sys import base64 -import httplib import socket -import urlparse import zipfile import logging -try: - from cStringIO import StringIO -except ImportError: - from StringIO import StringIO +import httplib +import urlparse +from io import BytesIO from distutils2 import logger -from distutils2.command.upload import upload +from distutils2.util import (read_pypirc, DEFAULT_REPOSITORY, DEFAULT_REALM, + encode_multipart) +from distutils2.errors import PackagingFileError from distutils2.command.cmd import Command -from distutils2.errors 
import DistutilsFileError -from distutils2.util import read_pypirc, DEFAULT_REPOSITORY, DEFAULT_REALM + def zip_dir(directory): - """Compresses recursively contents of directory into a StringIO object""" - destination = StringIO() - zip_file = zipfile.ZipFile(destination, "w") - for root, dirs, files in os.walk(directory): - for name in files: - full = os.path.join(root, name) - relative = root[len(directory):].lstrip(os.path.sep) - dest = os.path.join(relative, name) - zip_file.write(full, dest) - zip_file.close() + """Compresses recursively contents of directory into a BytesIO object""" + destination = BytesIO() + with zipfile.ZipFile(destination, "w") as zip_file: + for root, dirs, files in os.walk(directory): + for name in files: + full = os.path.join(root, name) + relative = root[len(directory):].lstrip(os.path.sep) + dest = os.path.join(relative, name) + zip_file.write(full, dest) return destination -# grabbed from -# http://code.activestate.com/recipes/146306-http-client-to-post-using-multipartform-data/ -def encode_multipart(fields, files, boundary=None): - """ - fields is a sequence of (name, value) elements for regular form fields. 
- files is a sequence of (name, filename, value) elements for data to be uploaded as files - Return (content_type, body) ready for httplib.HTTP instance - """ - if boundary is None: - boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' - l = [] - for (key, value) in fields: - l.extend([ - '--' + boundary, - 'Content-Disposition: form-data; name="%s"' % key, - '', - value]) - for (key, filename, value) in files: - l.extend([ - '--' + boundary, - 'Content-Disposition: form-data; name="%s"; filename="%s"' % (key, filename), - '', - value]) - l.append('--' + boundary + '--') - l.append('') - body = '\r\n'.join(l) - content_type = 'multipart/form-data; boundary=%s' % boundary - return content_type, body class upload_docs(Command): + description = "upload HTML documentation to PyPI" + user_options = [ ('repository=', 'r', "repository URL [default: %s]" % DEFAULT_REPOSITORY), @@ -72,7 +45,7 @@ def initialize_options(self): self.repository = None self.realm = None - self.show_response = 0 + self.show_response = False self.upload_dir = None self.username = '' self.password = '' @@ -87,7 +60,7 @@ self.upload_dir = os.path.join(build.build_base, "docs") if not os.path.isdir(self.upload_dir): self.upload_dir = os.path.join(build.build_base, "doc") - self.announce('Using upload directory %s' % self.upload_dir) + logger.info('Using upload directory %s', self.upload_dir) self.verify_upload_dir(self.upload_dir) config = read_pypirc(self.repository, self.realm) if config != {}: @@ -101,30 +74,31 @@ index_location = os.path.join(upload_dir, "index.html") if not os.path.exists(index_location): mesg = "No 'index.html found in docs directory (%s)" - raise DistutilsFileError(mesg % upload_dir) + raise PackagingFileError(mesg % upload_dir) def run(self): name = self.distribution.metadata['Name'] version = self.distribution.metadata['Version'] zip_file = zip_dir(self.upload_dir) - fields = [(':action', 'doc_upload'), ('name', name), ('version', version)] + fields = 
[(':action', 'doc_upload'), + ('name', name), ('version', version)] files = [('content', name, zip_file.getvalue())] content_type, body = encode_multipart(fields, files) credentials = self.username + ':' + self.password - auth = "Basic " + base64.encodestring(credentials).strip() + auth = b"Basic " + base64.encodebytes(credentials.encode()).strip() - self.announce("Submitting documentation to %s" % (self.repository)) + logger.info("Submitting documentation to %s", self.repository) - schema, netloc, url, params, query, fragments = \ - urlparse.urlparse(self.repository) - if schema == "http": + scheme, netloc, url, params, query, fragments = urlparse.urlparse( + self.repository) + if scheme == "http": conn = httplib.HTTPConnection(netloc) - elif schema == "https": + elif scheme == "https": conn = httplib.HTTPSConnection(netloc) else: - raise AssertionError("unsupported schema "+schema) + raise AssertionError("unsupported scheme %r" % scheme) try: conn.connect() @@ -134,23 +108,23 @@ conn.putheader('Authorization', auth) conn.endheaders() conn.send(body) - except socket.error, e: - self.announce(str(e), logging.ERROR) + + except socket.error: + logger.error(sys.exc_info()[1]) return r = conn.getresponse() if r.status == 200: - self.announce('Server response (%s): %s' % (r.status, r.reason)) + logger.info('Server response (%s): %s', r.status, r.reason) elif r.status == 301: location = r.getheader('Location') if location is None: location = 'http://packages.python.org/%s/' % name - self.announce('Upload successful. Visit %s' % location) + logger.info('Upload successful. 
Visit %s', location) else: - self.announce('Upload failed (%s): %s' % (r.status, r.reason), - logging.ERROR) + logger.error('Upload failed (%s): %s', r.status, r.reason) - if self.show_response: - msg = '\n'.join(('-' * 75, r.read(), '-' * 75)) - self.announce(msg) + if self.show_response and logger.isEnabledFor(logging.INFO): + sep = '-' * 75 + logger.info('%s\n%s\n%s', sep, r.read().decode('utf-8'), sep) diff --git a/distutils2/command/wininst-10.0-amd64.exe b/distutils2/command/wininst-10.0-amd64.exe new file mode 100644 index 0000000000000000000000000000000000000000..11f98cd2adf1075b7ff7be7f02ebc8e743bb6b9e GIT binary patch literal 222208 zc%1CLd3Y36);L_fkp>d0SsEg1kVXT|5(sKSK)axtR6{oe*`mS_jYb?m8R!-r5rdUz znrkcbj(TP-ouneZN1x z=lOWZ?W$XMIrp4%&vtL!3f#3>un2-+g+E;vgi3+`^9cXOKa(I_H+=hb!ryxC8BuBS z?-{XR`GadSRV3a5u#gctFm$|AJ(gWne|!sfx( z!d-R0Ss3!I#JTEUky-HAzWMR^;QfyV;n(jqbAWK{*6w98pq24!?z<;=k02~Lz}plK z!tX3|w{jl%Kb{v8%9daT;3OS>hghyIS2cb$FGypAvC^O>UsKLAe$Co7#c)kH0o(}v z;kV%0a{meq|Np=4|CI8E;Goi6A?Oshm`ZaeY)5*Da&?7C(W9Y%o~zltd$$r^;<78@ zl`e-84!Y78E9&VCAL~%qF@=4hlphq!eh8IH^8~SM6I at Ue3fraqbUpqyE9F(ekiz!X zf`oBSzQZgqP3_P_6S5CNrtTaAIptNM0ZO?Z?4v}Svlg0#h+^}zTK!18m%?(!r ztCL-srSJ%v%Fvxd5iZuMzyQVaUlOQ&AxrJBg!=ngjgP$tRkS}{(RCQ|AL8rOj)Gt> zRI<-0Lkr6SC{h8E_YW38BqIXtNJ`Ba&6-Sed{}DooRzgYjCutcRS?yI8gc$rN#x z&o>LP0=$L>GK}F$k$Gl??a`g>bFuy&7 at +Pvj@RXFM)ho7eLPn0Llm#|N0zt>eC%#u z(G{6Kwl>ShPD)FqCCh3$X}(z5cp4EXvGc^mF-h&Pi{wor9UsX)PDM4gikeB5dh+U0h at _v*K{EZqh2ug_ zyW0T>NQc2MtMLM}9rYJ;z_nZ$6^$ld$LF?V;UeOHX_2&8iM;A6LPTyn{h|p6Vf+4g zi=brhP{QTJ6YS5Z_I<3`%T7p49&*9!zHdmImz|g0Eq+$5gcrCTmDJM%q>bx`pDLJb z>!F1GE at gV8e}*4WQxr at A-rE99>&-rc+~Fzl=B*t5r5O1o`L)=-uB?wky at dc0CxtyqAhva3LRcjholJ)}b{`+e{;$Z|l5 zq~-x6GTbg2qCj@>4#m=_L>87R5!pV>$Lf9V{aAVRCR2sA7s$G at s?e8;+MvUvjyzF% z_BAT2H!AH1O4s+%m*BVxgHC*grOv)4ZnY^eXG_BQz<_~L-PtPu+)!K(?WdJu%gYre zyA(yu5rVh-p+A1#=IiSfKn& z at OqhnaDWQ(@Gn7_NP2Wz51d>2AFP{t3~c@&r+mU0EB?E;q) zu$=L!jruG$_0$|DpR#9H`B)^^VbF+{%M}*n z$gH at zqBq?GsYB1AcJ$Q33^;!E&f;wu~D$p59 
z*tQa%L$$kMz#uG^Qt_%#iW0Fk0A{(&U2L?s4Gl|e8*fNq9om(>Cb&$Spl|@Pl<>5X zcpaV#ayrETXSbid?+f2;_k~wDeBrf(JkaW203CttM*uJTM$FI at cIE7AUtXE0tD!=~d1($lxNV6=i0wrBMVtrcpwa`=;5S%0Y`buqg*0*oJo_s>+od~g zPyy%S56H5ab_mJ{!9Swjm^S7nMCt;Zv!WJy(1aRfuwJ96bq at 7nuQi__-d9rJ1<+bN zA1*$>5$=m?K>I3|s_X`tS*wYsbmtSdVL!qCbPMB{NZ>yFkVS|OhK7FiFg5gf01Xa8 zV(@N at fcDX|91SpEra#8#OA((hh_8elwh*q)Y{zzK=vuGEj{pPP` zuqI?yV^sMI at zrCg(9vzsQEVC6D}Gl2Qnd(jvwnjRn{q89;G zdO;SnlFw_hw3(}juTuPI at 4!?TcBjRt0V1gf_3GdNbl&UELYOeI;wv00MO{l71#VS{ zKXGFZ zA*nCcyV7Zb6z~c12A?1&U5N?8?X;s{kcuk^U8 at nC2q`M`5D??K^X{3+Ozq)~0v8{* z$s~xx^4~zV%+hZ8E>0HPxd*77j at jJ4h50{pCiej{{5Tm%2-HQPU9fvDNXXW~k=Hyn z)8pg(F{0#ll-P|rDRe{o+K>{g5fyO4e#vgwWCaBkt&l}j?`OGn^}-A+7Jj;(x#(19f zdZH9$4dz+TCQ3m?JI}f&QA#7T59mIf@^ceKTXg3KJnOnd7C1tUkOhJoZBVf5#j-kb z$lAoRciJ^cvRZNa;tr?Od`B at ZJ% z(DgphA-9!S1k4)u`w__WUqRN%2T|tl0<{@EVN)!7By45?6!)_#Nk5DVqc%gm*kjEx z)P7{tf(#rN1P=9wN0l!`ZEdT)AEmuNJnjj&kJ}7WX$ed|P~qLIBG1D4U5K;s^iK)? zm8hj`4rKo9ol4kSgkD+1sRQ5vb|O&Rfq{l_0ND5ii4b{c7WXK_UT{52dEOs+9wB!X z&wAP at 26zd3LEygnWGiH+>(1LTn+5RETl5I%?e5Gg#k{Z=1jm5R%)5DJrtUOB zCa9m>fL$#6vB{uI1Q``+S7tJ(`MAlT?;53_LE6a at G#6|#l-PdS zf)Zc^u&ch%j__-TE}`F?_S!8#w}Vop05+pGVg>Yc+*_`1$nr;S{vkXHxc}*kSl^$; zQnOINrf6&0bUkhc!#r&L5Nt?a-bd>{^s`X9KQgKq%1dmYkG%(mKW}D$kEOoPjmMi& z`SS{=K-6cky>DX7LHofIs9Qn-nR#U?;<1qfC5cL2bX_l{vQ?*lqN296YB!Wa6YdtE zW at J6G%Ms0LN71suq5KrswWQ9P(+kFz!J2_>*IjgGXZ`FvDl#AYBEYn_u}STrQDz|-q>U1UiXa*-CDgi$OP2&7B>M?dgclrsB z&JSi%2P89cdDa~~3kak;Z{%5X6Imei`|+$75?N4DppmtrFPw)VwOcSwGZ6%Ka4e$Z zCZN}$A`rYyFf*WzRtR+SvAy6QH^XBy2-zdRs1~(ezuL%W#m?*_8Ji5y|*y`N7p3vVMwuswt&HU$TJ z*ie)q<8F`c{O%Ou%oTHhGpDx`hog at Jc93H#p{&8n&4~gs$9GLL?zG4kx(;KHohF!x zfhUpiwhQ~~Jp~bc$Y4?%-5oYqv^^ID00`S(@wgoZC7+#wY*~rSs)FQ at fkVe3hsvR3 zEcyY!g$Dt$BS7xzO}#2oF`iJ?5c;x{bD=GJ9!FdT6rMkpLZHO8 at aU^4$go7LU6NWe zR;slQ7POjH0$a-m>m&}QaN3W-zS90oxlk!1&*0{F5y+kB9PKxP38PkWdj=^93=Yh_ zf|%QoPFyynqrxn*60JWOl%#b5hnd5NdHMR}Ff$;> z7g?Lm&SRL7LZ~=iKZyEYgO{%F6U&?+kk!x4V%dKQJfnhsgR9`r1^4Csxo&JPqM(LY 
z_9yOYzXX~w_8%CS0v{snkIT>^+F+N(t at RjmH$g8_^aQB?BYB92XI+H^Tv{m<1hi~7 zTS>mb!x(k!9G=T at kpZkc16Vos3(9UmG)8RWW*AR_)1_@$^k@$}ia!QzF5#e|r%iUG zpOsN}+TibFpSD^p0s{7GX#8)lsN7jEq|(f^6pFrR~OU$s79RZxo_ zxs at BZD6-##>+pC5j3Stx1qy{pL6$BDG3rUg8+Zgsoz^ysLNn)s^Wf%- at 4!tcAK=PG zwA+rsL%y29lwQ6_!F2$1z)~+&S_Ljj!V7RF&A*w{GqCV>x>BO_~0lM=qH2}k5^;%W8t^7sJ5!)>}X>Fn-LTqd*uYDZdxbiREn#Rhi)WtZ0wU4f#_T>Z{- zCMjA)IWhHu80 at V(3!zf%Mia;3YTa1?_uKf40~I<8?w}*Kzw&{Sk`m~z{S>RtJ{avV zv-)UkxFrgDWW(*~Z1`Qvvl}2Sg1b0^8bdDxGMjir?qPAZ$dMF~=ZS%;WIGWu?L8u~>O?-_8S^|oE2=Htd zK%apy3x_D$_o(iC6c}q82s8BaDs<=fCZhNrH~luw9Yy}|su!X3PvPliE(c10HHLw# zeQ6P?L_3vqo|r}i)}7j9x{s3O6L!WkkblK9R&AFJbj at wrBHBf7;1!sZ$1V6?Y#uJkb*2W%JPevmNihx-$ z6$4>8Bu`~)olF79aSIq&TdWSrT_3V5dKIow$H68m-(~DPz=-&Bc{EjE#J`wmPUb2+_YK|o z^hCogR3Z!C=nA(%w?*SfTtB`IX|8!5B*7@#Oi5p05swQ)!|1e9l%L!Gy#95(K1{Iy zHoU!*mcBt!?ZJ-OuI)eL8UA8WbicDxp!Kg4Ije%*{Q6jK;p7GHye^XzEmIiP&0lo at Ob0r-t*!R11*PYOoIv z$r>I&-dGGjvJMztZ^P~vuzy{Ap at thzN@9^Ku^wpK*Bdyet_I`2N?uKHwlWPH4E5EX z(rH7>6eT?LPf#X45mT<-VzUC|H6ie!Y!~3EdcphzE-5|-*Iji02FDE_zp?h0ifVmy-_U3;MhA?ASsdaYQ^#o;K~=(91>XnI=Be8!f0dlG;MAB z7SQTu at cOId1jpzI7#%f)p6g-V*>eKvi|qx7v}!^BBVR|1lmToIByU zi9gq7z(Z(Ws$^&l9;QX8_1Jv%cx1U2=zatk)4?GTMVBg{rb7PWhR$+7gL0c}GW1$j z6`Twdc#OOez7Pj}f&GCkMfd_`rSgJesL53G5 z%9U z0gIrmr5NH7KPci&sgrQKg}?s<7?iQ>jzQQg+?;_6q_{`MbY)2}9qmZSPwtSpaXjB# z;bR5%ZuM}Hr&pIn9d)-Zpf?SdcdKT{F$Xrt# zu7Bx9ZZx=#h)U+>bMA!RM45o{^wqn#>4Vav)AV^fCmDRHN%+3WnBqbZH;a|X%Y}SB zZZltxo8L2Ga+dP-IIshM-2>5Y%fdOd at jOX@$h2NGU&ft8b`j+c#fV1XYAY>0ciHa; zCLJxA!Z1i@=+4lMq=xg=4qNC(7|$4zv5Rl*oO|`;ggr3^2O7(z!2j|wG$*d-j)sDJ zkq+t3yC`>BUM{i7C%14DW_18permLiXUV(UnaI|jp$tEo%(ks_fkDPBH8Qm3C5fxO zOK4-`YfTv^5J4dZgqEVBG{`!I6JTfux$8Vy6PtCq+;45xQ at MMxa9h10M4K1a80#GLh z{(1>88-O1vH9`naFXB>Sv-w&nu}oTuw%5kfh1|TJW$V5Jh<0#ZzB6(4r{vXd?xe{w z#$bS~eoLEj?sF-~$>`_7Kyu-S zn-hHD>Mu9a0#d~GC@}mctC_a!^`8skKAzYJupT3HxW|v#;^bL1DxfVuage_kac+R# zK!2ogrYN{pZva-t%;s20M^m~0;E3H!6YD8xLi-13)a_<+>~-hMql~D-1EctEhAdP& 
zIT+=NB!b=B%?UA~ojD0+S_kgC)gCD!hCx2W6#!g8WA`C8QQ6_9Tnq!BPH3-6YTmYS zlDUg{^*ic(pXOd7 at bJmUFjuvmmvuc}2*VG~`Zm+D7RsvA7SF^8^ z{88rQ4)KI9`8!u4L^issLs{lBcku|422aSv6(cT>?$&-XpR at aFQ(||;+6-X#Oxj^V zaWjw=Y4>E}_L++#%zVcm(sV~=4emFXWZ?Nzd=|D9X7I6nfCUu21$gmg9x at wdPR8ctr at b`%icl8+v4 z?hJ4zc5nXe4w^9tM@?r4?>STV5FXA`45ht5?n~8h406Cg2CC~_F59}Ep;WlYG_A4m zkVmv@ouMG@>(yGnd9It&`Kd;=HTm1;C8o2l^8t6{+fp&B;FPPF^!;!v|^Cu z&t)`u+Oj&crtqvIS;Q#di)&Nw5fXL(Lb-?H at bCmaguz*AN$VWDXeJCQN at TG~+SbFEaa=HcH8`Pn zEg8gye;b=HG8OcJzk)%=Z>L~}5}9w(ov-o=;KG`NLv-h!;Z*CNB0Ui at u$eAtyveC+6kkI(Wu@$pLH$ut8W2l55JOJ_6B z0G!lb_!gX;7?i-t=QJT~FS@&p?!>UGaI$$&H=JzawN8!eR_jm0k~k?G#BtKH at SAbM zV>a8T;OHF5(S>^OI19cZ^-gbXWCiWM`2rnG%vbbLt|4ZjYu)!|enZ_)pSh-P3&O=| zVZ0{hA at pm{&lD0mBKAQy#D|j22Twx7O62C=eJmWwO$wM`?!(^{CoFs3kd8d8R)~hk zz;Rr!7t%gyuFARZY#5n?-p4pq(5e0YJ0wB2j5YSdyuE~L^Fq|-BQRD8gvc67gm|nA zOCJIIe<)Yv_b$Lt)45H(D^iC6=;-e!_q`D at ii{=4&b8Bo|CoH&=H*9m at uCQW*E5O| zk*l6K;Yh?5h_oXs`-~SI;7=6(dW|AS*7iavrJlD0hr=kAk58z7l)_I9G1Oh`9TM4V z>|jAlP=ws!bAJf-gepVfKE6v~iUapSz>W6O1}u#0eIYFHKANr|%y#FIzk1eurky`*Z|90GYp`G1qOc~r)+UfWg at D)A1K`D-+ha6dNog265 zyA*w&_>QQ(P);oMxl&xiwk8ch1G at 5i;gOeJ0&`4Tjpkf&4QS(!J23FEKk*&~ z--e|vc$SRy-xI?)7mY3n_1xg{?NbTbj}4QUwrNd zK$cl-YCEpJxGw@{i(C5VxEn($z}MvmL!Z$Qk+<)nfLDx4z*J!H8)3kVuzo|3P#JM{sX2PZBu8rPjG&+RVRCZyz zeYA at gX_|ar((7XTgsW-{f*RVHd%6_g%!|*K^qsL6uP)v+28zFvEFL`2?ddP^>5H9> z4bg_52a9;A?mRvak!lX!p>|k;0ZHExSGv`ljWtU;Yo=&lVa<`<3SEbVq^ROP z0IN^0_2&hFAkRKbk at oet-D<8wciuFRZ)Jvo#3#Z0zdjLG;kGg~xD=fK*iSF%Isl|Q zZykupTkK%<+LHH?mG*hTg+u_n<)6?Njy+%_%i;S_EgVaS8I=ZkKOnn#S2_uom6^$H zGaiGTqVPmp-^Q1*BL&79okig`c at bZu%i`PL9#^_-fg{@-*L_eIX5iWnhNrmK_qR54 z!s_JlDckn`79m?_$8eFJortF>%ww1IDxBb7f{OdyREr>qTdTImVRlDb9CBp(0Aq;+ z{q1xLBW!>|$_Q|osTA&ij7%S>I|tI!afR4J8*N<1jeYIM2On2Yruvx|r%==A#F|;H z?tEvUaU<#P(U!P6EG0)~7emS4BuhGfJkT(5NtS6f$B}$#9eEbRP9`w?#d|pzn+Ia- z$cum;f+EH?*g*u`Dm%he{xEKAt3x9>;p+c|*NGGqDB6?*s+ zft7<<3143wUmqT_e4S@`&Z3AAYv$fxB5%?aLd-^A%3CY0u!v4l` 
zj+}gmjjf=@stDXsKgdDKG?LhA^s!5%?5CHmA8}<#u%8drEPv;t&j7wC&{-~9$C4FL z5=C_6ByNx0t(9wBdzIiKi1X*S;?l&O0HxM^lBWH{Q5NAHjvx&KKqEalxxrz8q$2RA zjrFy01b12wD5^!@)AihG{h$T0tS{+7+px}(mi`#qNqad1qnEZ{-imSDq4yp^P~HP$ zn?eiP-fCLs#I2ogjsk)oti>j)CS#NDVjOr{71R%6?BelJ$);cKM?lP at Hf^&;C3oaM z-;dU1a)1Fead&b*R6pDYSFioxBIfMTop#X-E#C#b1ed}u_OK8dI^jz8Fq8N2`7P9g z=1TT3j`wi*2I%3>yAqpng9D6RK=s;K>ICCO*Im7(;0KbB*iz7C&y*1tSm|oTpn&mm z0&WU*$uJY{(~eI`1TC_wMtI+lBc3HnB=9#}QkVHFmRUT!_SY!_k8^mI^9 at TfzJ;JW z at 3@XHny;p50xptm$cNm5^YAS at P_$qLaq9DFMqnsWn8HlW_$fr(vE z?{C3PL{$oZe>3og5?)$a=lxp-#TJ at rM`73tl7DBanfv>Ec^~oH#ossHOW*>p?}CKD zYyQmKeYcBk7_BXgyOos2(AHsmFqstd1W|L`W$fqo<1XY~T-Hqjh~J^@8pb=GzZr_l z6}6hT23f?}c(a8EP0a?D`?sHT(?elDWTZq at 5(OH}J zEB#k0-lyFMB_kXDg70s-%*IOsy7M{yFrP19PBuV-x>>`pc-n&$5pv!&k^tn;IeF>Y$uy2nhjfl( zgGYb?%Er0`y*5ws*REC%ysC6sY&gCn zd^6N+8BR4PQkW2`;=0$=>qyTyZ{T*y2y=4XYs_^dO4wi!C66>CS?iUs?X3xL`zZ;L zwB?hK7JUg-X+O{AVfh}qb3dUY?FXGX1A1L8Y+Td0+2!Hpt#Nh*hMi9?Gxloc06=E| z^PK_52YL~dk51^CxrS$cG>C}#7F5t~Z^9RBZ1ofQ%jx}f=M#i?=WT?v7`{KioPXvu zjtwKK9mi;Fm|khLeU48+SZ2RRXzU60M^3B+7s%CVf1QL8>%lDT>7EurJ#X6J0LpLZ zNk$6zTdice+yYS0u{fGAp9j}Qfy);}K6o8aELHX}#PKx#Ue`k-_--q>%o{@sWcvtzQj^?($OIV82i6dEgTF>m`q7X;a|zfu%*UCukO#sW89OQW3E zAU;)Y#%J}hCs6r=(`k=PH^sh>HiX*YSUb}T-S(BlEf$|%!)r-;edsgZBs68`jY8w! 
zD@`et+D}AJi7O6 at QpDN^oCK+Nb!1 zotKw7FE4jqUg^Bl`6Z-iGJ4T*h)V4rvnXKT!Gjzt2RT9#w~ZLsD8CM)3^r=BNv&%A{}>JwM1qHp?CTijU*iT}sqpr0lyKPuc-in>fu|2jtXXD9 zrD)t?Fj)(*codw`MyT&)^-}bd!y75Hx8iwqSYjJC`QSn1Q8!G7*LNklBCd5G+m63p z%Q4w~d`6_~dd|z$YkJF;DE`9qh)(-hHwkNOwqe2*9kR at pC^|uQ;^``Yjo#MBvpg2_ zU$6^C3`?AEfohq*s4cStUNEun?bVx2 at LXy?hzJ_lR$~bc6Swr1a6@&Cl3gFw?BbRK zGVA|iIeWjP?+Se&sTVWEr$*pRWEYj_=QcUJ9~O^l4spwVxRP1fk7;?2>egdUcG+`e zkVLg~IotX)kOA|a7E)n=*)yz&D^`_kq3W6zqF>WOv`Y*2|8Wbc|Kk=M|Kk?=LnUaW zvjwb~Xyw`#6tyNJ(Tb$jv~+2y<9`gxrK_sH(7k$iw}g=23N$CjSIi9azP> zW}%nu@ubn;V*UX;&30we$hLASmGsy8R{GWae zy{~O0+NG7Rp9BBhR=$27{C8XF|G&~o`Zvy~^lme%f7cn+*-~;=@zyT=Z@^@^c4DPp z!}-Y;UP!dSx!++-uG+-py3?3kIID1W!6f_I$@Smr0w>sOG*4lY-H3W0nY-rOnEP0A zvZdo>%ixm@>W$n9>eCX1E6|4NP$3$#kKBVXJIuAve9W#YKNu9-s!c(2wnA;MJcvy7*jaWI4W8%FbJ>=79VtVtJj0 at 4e^~~YzLo|>?Z{aF1%D4R z5_nO2fRT2V^I#%dva4!h{W|Er%e#DZFhGvN z9?X>4_kc#ZekM8mEKsPhP?5}5O=e3yGFvG7*$L)L_v!WS19D`Wjz9&LvuYpMfBHVf zy`OoP9A{Y(%#g<0xJWSj;4?GdW=Kb^+qK; z*%f|)_P0R8xI0o(FNkYFmgk$0&(u0|Q;QUtWw$hoTT+@@EX|Ty?Etiltd8H%R_zGo zw^e(@@?#)EStIp>23S*zUO%!%vebClkc|?n0d{WU{N5Co-3Pp7$3rzfy$XwZE7D^X zW?oJGSPZ)5yn}1|D0&_A-r1wwXq-0;jaZsx_rZS)RK9r!A+BeWpNro_ at S?~Ux0bo^ z;8E5{m}&wImFm(PM>c_`nfwD*>k+pc4*$)CF9Uah9mX7d3jmFN=0Heq;*cVUMyKBj zKLDK=f~6JazFT%V@><2G$6>H?ue&~vpK|a>n)-G#9&IZc$Mh_4rTZhZbUZY34C54c zNBx$1DQ}mkhNOUY^NJc)!5p63$G)&_wY(<3!y^5T~8He zY_d{~v(!aid~Ke at _ITYNT+`rSqw?Ud>5bI}SiRp}4|34_$gNUj_T|byz4FQ{?I#YL zip(-en<8mRE-Mk7a2!iBJ`!X~?rK`OxWxoCQ%7v!1w zy`KT!#_z;KE9w<@J(lg#uF)*ENlhX&>yBfyBd=|>f3nedPimH)=UrJlJ&;$ob{d_6 z7CQs}dRuiyuxFiD=#Bg0Mu83gqe*5V?tqIRUf?Sxv*E&rVMHi4l-#sNue&1}vq&53 zaP^c$y=dqw?hdeGS}d}=U3TwRBEY$7t<4v4^Qfw$Do}$f0;ps*2SD0N at H@&|AIdb?Zj{f`QpA&E)r$ze_VBVyZ4h)Ew^9FBoy(gz=daV3;^16R*wFww&c8++ zmgYiI&koZ%?T8$$MobR|abBCGdgK=N3Y5OTn^2RDSA9f7Ejpf4#$iKKlF zw_KR29fDM;0YVf}22XFcM+wi!(dTCwJ0NV4nU~@dAum%=i*f`!lwz0GaK+dUDpCb@ zSf3ALEY1{I9gOQj=eUB&lN5LF_Tog^&f3Jsy{2F-}!S$^ir48Vm17*3NATu5XA zG>v+}@ub_!B-tD3?*PG=v2Lg#bG+esG6+Uiuh&Z;cv>WrdfpE)cQedGEif4%F at 
OmFrl>=f|8yQF4vi*rIF*z8PpL}qm;RObLy zvk+0VmxtBGP#(*<#bZOMP|P1L&T9HJ{P+`xheeV5Q8Dxi7Ope!> zpxz9qIyNsViSmvxT+hhLi>|d<>cTU!FsRs_w`g at A*DjaTpjz#A{1iJ4BQNx3A7{t> z#Q9=b4e4Csez9x|q2W#ke*@m1dwr;voO`F8pX|(1WcGlEt&`P`-rG_U6XB)EP2m(T zo0A>@@B?hTmu;~7!wXyGaG=m1UZu(5CtAE<&V~a;+i_z-C7#F3MD at HVK0l);?yUYt z4?)PQ7oX4Rfp=Ric%p|`wh0&78iGamMp_51P{8jaCjPtI`2EEAJq0R}PcwB#VYd^G zyf`R4pdQq?7u1wb-w9l#?<2 at 8Eo!Yj at 9dgma`*vuC#JBI>$9LC at fSIKB9YTvB zQ07aRIUA)7o11~`4DR9%P_1fL(G~Gb(dA+u6?)M*tZJ1t%*=XUY)wVkKhjmlu6xt-CS)JsRvD)4v>Seg?3GlE8ZUzd_{c zFT3M7b{ickB(o^4`X2IG8noAk^U+{SeW0d{=T;xmDxfA*8@)_nHM0An-=aY~1W at iW zMJyj~!nw9gsY~f$V8ageAES|2a3t(FzR3X~d&Mo8I at +fSou17KGEhm0Y_5{0WUft- zE5`QG^A)hgf;K?nn%xOloC;g0ad@{)w6di|-tY#nL}ogew@?btguc9* zFWe)%PR?!vih6R_jSclvZyzK^*5f*F-gnZ)vNdqUntW`pw)Z?=g2GKlbt%J8iOs(b zW&SdIXmni)O1)U_MGOM#LH2DN^kG}R-5k6_+IVR=DDIGtUb4<^(0+NAyuN16GccAC zIedRJPEi$})TNksE{qd62>3{5UU02Ima>JdvF&(XHF4%L2N6J3JUVn_63bo^h@*FN zxfDHNx71gbJEq&)Pq3)F`haGsj%wBwRm1b at L*IuoE^(?oH~=Ku8M6LnX4Ivi?H4PA z9)P>8YV9SM+v*8XjoM-GnW0<5b2GV4#(^}oWH*HK`)d#UlZxA=_V;jJhiCdF^%JwC z?(QjvZ;IUt{GT^B^N~8}CxGVWk4fOb-A&zl{q#H^SAwaskk7h~oGmS=OR*;;mcbli z*&iT{Jq3vxvj=0S3WgdgVZQOZ(fDQb8|($w_#5P;h-6!NWN z7$vK?b$9!Ikga+3q!?}kUXM+JafatA36Tc;BB&R8h-D94Mv96FttYdG6=K+_I6v|} z_lo(Sd-G!O6Qk^$%XDt7=EWo58YT9DR*LqTX{nY#gV%l8i+)g(Jo;e;aRhkbiWZpT zn2BbcSk`*!Uk?QNWIM(wqb-h+(APO|j}m$Vxo2cG232;zkPN!krSu}(XIZLPW=b>I z0QbXwqU&8LWk`6cfldd?diA1BEV~P!BCO1V3uwn0%wu~AFLfz{ARSpx#E7-D)9ez< zMt}P-ESw$(PV0M73+G$mPHnLSr|b2>;qW(5uMbTH3YAUh%4+e^;ep@>v3=TGUmQiBoLLEgetPj+079i9Zp}}Z^IC{8z8VjVas6eqe~~K^ z%5xygcSJjSBE-^EWcfOeCpoJCof|$QP0)$I1YQxxfZN#CHax&%mBydO{tCD1`GMl| z>&cdCf;r-D3@%2N|D$0wt+eppeOCUv$M`+igMQ1ap^4ZHDF*ZW*M)Dz`gNRnqNifHSGYm1C|IpW(P0zP2*>v3K}pBDJFdP+t%mV}?p zr?b%V*Q>RcEek)8 zBh$?WHGbIfEr3hVEK-9ty3azZs7S9+G~hjO;9xq*;#N);^<#@#ZNb at vHk*u45i^wJ zdOr-!lp+NdqdV8Pb%%2foWsh?7{F~h4c(|8r$D>N2yq!&Nikaau>Id?<$RY`o>Z%Q zLn|qgyUNRUSz2LCkyJCwf7Fs6%&T8F3v8EukO$n9WHY at nbcVDOFNGXDIflnNCZr=U z$ctmTF-V+j0o$f-x$5mh6E�$ncM+W77*{oyS3lOIen^6SL7 
z;b(Yg1X($MxxvQbmzFr+oh1K~vuZx|fX at OU%@CAJ?7JGA`31k}dIKwjS78=Y0|w35SW z__f$H;k>H at 3;35KceRS;`FLvQ^AwMY<-v7gmdfE at W2T=iE9^31C1z#usl>n(V at h8x3mL{R*2T`4T3~;m)e#d{`#R;IaqB z7#L7xRiWN>Dc2)Y*#KuRX9W8i2sgCd=K$)<9 at 4LShJt>59waQ_p)SRVh&&;N=N7QV z1u!}D^UkiliAHzw+&4%#1e}%?H-%ewbLE~!(3^YTbWg5~=XpLy^ zZaOj`n?#p>LV{Ym6j|Wj~|S4Qzu31MA&(j%4>t7hJyu;Ha_m;9tW88U|LV%b}rp_mC@{3`?A>yCO&Gl$5g{2{n`U@`_F zSe1_*#F%34GDk2)oBRnzpS7LMh~#dzIauW6sgV=l#h|`p;|cTyFi1y!0QUh#VV-fH zZmGw1CDFS(Z*Drx!0eGQ^jps+EZn-3!Q4TBvGgIVEpx!gP03N++4`?WCFeabL at e9O zUBtI;faaUOK-(KIHv}qg%2%WcvkyO3)R6FcxCV&*Ar2(WpPX0!NKdwSth?TQAZ~R> z*?yS6ie+&Q_21(3sklhXLsLy>6TMOqa(>GcB@*ebt&{=FgGS3l_ zQg{?5KaQRObvG3*uTnGQ&Kp`L7KG>0+h~@=9{9WyLHMOdiOx`u6+pJ+{!TiOrp4=? zW0y?d8R9j~Ni{7S0>HH7uCoUxlphbeeW0j-xcK_qewo$eu0zt^`7x39)L|lRi{US* zxAzO$)!VO=S#79?REh64uzV{!+}>P?T6b`Rdcp&$Pn7pd5v$#@-?CVi!i&HoZ3tPR zs~+kT1=dh%|n at UA~V5F3c?olQL0LkMlDrCjb{Ng%!|hND3~5 zNcw(-&CkRogT#74c2!~>gdckLb!#yaYgI57f1V!(%3QsDumEbO_N>tFS|?UaN(hycy}i6fzRyQMNc8oxe{%rG2ewbruZq&{T~79)s_B zYsWq^L}oL_D(oKTR7pK=U3-hTr59Y7 z2UoR&_^-;V57{I=8t-4$5VU!-8{)m`C=1EmAhQNZuU7PGZ{GejJG|lEi9Jclv*L`8 zP at ShRe=P+{zn4iLWY~tf=rOwxa;O_*``TVa^cWruq24w;xPUE^@uZ+0!_k^;R8OQd zm3Z6-!i!|}f_=>>HpfG|;y2CosMQ at H>!k{lPmfA^Rd|kv7g~E#iOg9~TjSLcg67o8 zI<+foAm(c}xHsjej?M?eYjG;8S4^V1VZAj$q>(b4I|Qk8^d!^^FHV)ibBEOR=uP*=)pCaJOz&E!zeMB8*?{9f(8p4{d%O#W?T_vbaQnF<;2 z4W{dLS$_9Ne)myd^rCrIr0iFGxpZbBYI4P0r?{g^w8N}K%APi!6d88>*FAii9+aO6 zIstlI{*;>jq2MRNc@`(Nh zrLqrVsd0%?E;UCC*;s)RzfR3b5A|f5(I9gdD6AB9I_NM5Ac( zglH1j{`0NG=A9JqD4z6Yx5?~8IPg+{9rcF;@A_F_ofMhlP$Ey%N#a(JBxjZI!q?%^ zR!~^K_C_|CO3wYs%LCbs;9rPaaV6heBAeEG>wNAbq0c0Cz1pBlvaZd2l#JU zp;Xy&Kv6ZbH}A;W&!oI#poNZk%MXTb^s(M`Ryy8 at SzTe1PqkPfi(h!dxgc2T at P$*h z4{AOF#+0{#x97F&Q`9PGuW=2ucT{3WBnE1B`cYPAIVf)Rd+Xc>#EJ at BVzTI}EeF`Hg3Cn4*5GaHzIP9@> zv~}*J;6QP!8TGXnl;Cbc+4sQc1;G7ak-RNZ}yzYN0;lT5>n_xy{@{K^(rjI2s$%g<3gn-L{ zuTu3|N#9{P2-$05d^~Ko;VsFQ4nRdJ^S&hGn;iJ+Mw+xyM}mmz34y<{jyXsZQQgNW zoCJ4fh3jRwM|wJ at S1fti1hWIi_`D&fN~VY9KbnbhI942KEEk3XWCY51N`N8E20hF5 
zU=h*@zxU>VF$M=R?=vIofWpDF0LzmZNP}G!OI#+T8Uqq4Ks6u>Qp(tFl_1X5q$$@! zuNlLxQ=%m4+yuJo zeu1BJR(t at m#^4Dt(T^>PyINs%){-Q?zMnP5=JgQ-P#KM}O7Ly;QQtjef^-R`?;OGO z4N8AY=?j!TP3fbQewWfEl+K~lL+K<+hfsPQr2?gA4`ccXrH3i4r}P5#C{sF;1Lumn}cTl>3(nXXOQhHYx_`gGG5vBK1dLN}FlrE=~k2ly%`IHt?I_M~-sgz!! zoK{Ldru02ZYbmXy^c6~fLFsdpmQuQg()%b~M5#jQ6iUZZI*d{WrDjUMBwYQI()9N* z?M3OOCQLu0w1v`xlIi0 z(-b|r$y8PqIya>{WK~$SCL7=Nxb|O3i8#DW5J+D}eQJk&&3tjoLEko$@y{o|egLvP zYlbQ9N5Mi2_VB|*3_HLo4zPNk+H$)u8tW0th>mE?7MjN| zEa>Q1dnxpgP|}Ln0}tj`yLFvYkQlOjTdkof6d=3Gtd at DxAqd#F at _~7J%pcB-k)Er>FGzr`b{|K)$hqwiKucB&XE2D+KOUd1dWJ5iW)O_ArcV?@~lZB zB?wf|Ngy?5N1PJkOq~Qu;RF)8bn$P}NAA^uBW{J3kwEEwWNpX=#P#RhNOpg?Pv{Vl zXRUtKc!@r(y1tIn2WU~+l}wyI(Ju7y8~q`EI}pFEn5SVL62YfoUJK?mW8Qv%iztNT zo9DoMf{YUg$=v7L=(8Q@!{Y^BmQGGpgQTA9klF;$$_Ik|B<8UPiVLjm&3-V2i|hR( z5BQ2}X0iT&gBcE{*cHocdz)ktgQ?;c$)dKjDnIpG>iyY`KGs0-gILUjdQLr^8NgkR zxVh$l7S=mo7tOp at q;;Fmf!^T3y$;vNC?d+k;FC0kQ$lC)d$NAE-?vS_LjN`dER8<( ztRBcdKy!4Pzy;vOK>2}S6EX+Xmhf}xp9H7C556t|ITLtJW at pY&o-+V`ct$GEu)$A! 
zlj3e{t4;}y*<`M;_9?^f4(rvxQ^6F-4V{Yjom2(+$Xy^mtsp?FfhWztvi3DMc7lp% z0?;|M-63u8sa1Bd4EKvDeH(sE-njI!fB3x*js0t5Kp2 zj#!^Lzz5(Yj-AgBz>{6e1MC2OfHpwyBnI%MgQmo3;@X}9C{Jg_Eu3%ch4iTRX9?>4 zIq;VOs_p`hafJqJ_uL~0Z&KtxT?@>kUHEvo+1*QPw(I2P<5^hRM|gA=7?dz)ZWo at o zPIGtGitkY>^@lw_#D#4oH|5Eb+LpV~$q8g>3zy-sUt;-Vlobc3g5?^vlvh30Q>~f`Q)p_~ z!pr+3seOFW^EMyDS#vw^Gk(Sieta0*Zt%6oeAl!{%{7K+Tgw|B>!n7g`mnv(R;Dc3;~z^kC%1*v396}*;2$WqE&TcbKQt5Aeu at IwLtJ!uX z&)H|YSqKhbrFb4}xvlB%AXV&I>q>kMewwZNZ{Szu&amSrzCjaJP_eXEu~<*-kHyrs z9Urw(3+7ONmCBfFXQZ3rDWb!CPJGg&kfh)+mOhp)fDytZhx#Ixyi8{!Mmyi3=HK&J z*nNMt$B%LdJiS1Rm&F3CRdc{R#8a&-f$(hsa5Asgs+R~toq&=?d)+SxrB8GSu}^4o z$(dlPq+T|!e;-oUlZU;VZt^p4OSbNdEHg=JC5EX at 8`ANZS==tT9XHi!Lsp|+^tQzN z`dF!l?~UCpZnY|EiycUAmp6 at 6^qP1T3l&JEm&S!`>Rxla2VA;CJ at _-+z*e!CUiZIj z;)`I|zxezN4pb-;N at oS_(%ZO=ES^)E_h>M!G_MeI5QO-R(%ZBd8G(0%(y?@?FP5 at F zsSrR=3^sC&Ilwl1=m;M0qNVn)WLA%$dNBqgv3m7{HP}-U-`TCkvf at LejV-u4HaK32 z$gW>w_SEawS-|L0qYYBtp0xwiy>=<@{F)m=*1X0?hNye3faB00Mc?IVtF?!&hdZC> z{U}?HF98SpvgN#ihKTeQ(BnxcWDO02)D)V^YSk8V(7q8>L+QGnU>=T^@-98%(gxj) zbh-huv0%`q?$hJfc{bI$%x%qs_4bb>%OxVLFN`gjgXZ>+ke0|afmZc5#5AlfJ~%;r zPtFq^_|V&Ga8z~`o(}E#<^%HZmA&jL{#Hh=?N__a0v#E|4&bSmX+5@@g}51J|AUul zbtVVTI?Ex^08)s-}jqKbC< zQnG1^4c|JSf50}Xu(U6C;i2lfb84Llk4hV;)#15k__eK9YEAh4yNBT(=I=Ff0qR!n zGSe$F8QN1%3Bop9>(SD|THc0q(ze`=4W<2%3LWPQ;(pPKk!Rz4*Zt7LHS+MNqMmPA zx1L at ZfyO}so`+Tf$*u+_yZZB!iLPezt}=Jgw;gaeA#9p25RA>W2l|lt(;|Ub3%jC$KX%aiEP2|Ujf=S2f-_-GAsP27A-mWL= z at pxO-r1iv;Nb_peR^sB#g69&i9>GIA at cQTYrHZhZw$z95F?_V#nQgmF?-m*Y}GUX7^Yc@(oEOES#HQC_<=UuTPAS+JU6#f zVO}%WKo5SAeH;pu9~A#DYi|M{Rdqd%&q at YJc!OpnKq65FL?Rd)(S!lXgBf`PGZ+Ol zDhe7!6s!?uSZaX8muM!}r_^d|TUxE!ezn at xY6}4rvp^O=6XM3=0=T_lK%z(%mc0Kt z=f0UtLg4Gy|Bugy%zO9Ud(U0ZJ?GrBJX;05bEpk-y9x9Ec(8P29IvbH*8oDl9qS_M zB#@x~95hw;9; z at Ha3HGg;!X>q`Td*n)QT*=|4&nb6N};w73RwZ>==Hn$3*s}6VsE`JZCqwm>^VBmIY zj+xbjUY~=);cOF^&nIj`Ax6Zy3Az4-p&R;zt_`iQ!-`sATNs*e3C$lMh33N&nx78; zQ^*HxXnu0|G&B(#gyTGhyAj7#79~D;gJgi$zGryjNR07$Lvk4Ky#XqOYN>uIkJ0pa 
zZhy6rxEmDQ2^**PbC(933jzsAPEU?Tio8SID6iY%fy3P_U% zG>1X2${XT_x}p9`5A&ZhLt*zSr1K5#;FiCs{)RFXhdkfJz at sL2#a#n zuW73eQ^(vyi{E%tbWN(?iLkivQiNNOJ`scmZZ+TbXxWf<1tz{ujr#GjmPpe~8G at GPY`~S&g z(9aKM-PL9?Y(cFnl(|---A}{{~{| z>;>voaqRsP1IktDv*%@&8k|8Nx)YI4YJq|9w<}=v1L at SS-h|2wX!i!Sqk#V%iW-Aa z1)%e4!K)YprWnuofK~nX--t at A`PxK75&r(i9=cabQtKb#i)-)F>(GAxd3 at fp;ulEq zR7OP{QpRqn{jgMh(oBmqJVo&hwyUoLnImjooFeF;m6 at 4Q5;yH_qag-kpWKBe0;7o#?OD=Fi&iQ_$l?Mzdd-`oX;) z?>R6PvHbKs6iH+Cp3oi5 at YA50p2J;^#fEGC=9RD(Jq>lRTn9?Z+y>yOCjO63Gv$+w zGwofz<+6F`HxhQfK^jV&r#j#Gad~*asY{^8RG4%D at t55KVv<6C6G=o-Z7=!r@}Dbz zEB96Ve8e0NXz^X#+b&$&J2&sUu#e;(0O{%ntzKU06qJ0zG^xx>EHgZBXxY-=!N ziF^1m9o3)5H>0Hb^oqO`u!N$Z;;*_Aw-MD)1Vw zv?|yxX^mc2W5ALafb=$`Fr5?3LW4>DL{}>QX5-w)3+MhJp2w>)XT> zIWSRec!8L0)}davgR|Xre?ldHb#Wt}$*V}Db)m#&lG^0}b<%eTt6+>lE=VyTKaSj1 zoJ_)wra-o+ECmY3(P_!*;pwq7$#pSpi z8JhEc%oRd=`c+wS7E=d6VNLll`1c`lfYA~dhDrzyKa9ea>XL5gw_Y0+tK{L at Sb%F| z5$Q0-Xi)6$E)*6uXx{c8eXgn%TfOYGSNNAEwu>o=be(c6(9gr}cSib&?~uC2t;=3^F$`OQ3^Z=neFXT`*HSJ#=eLNazciI>ruU}7GS^x z_G{*J?O at gb1l9DNQbyQk{@zu)Y@$3XC0)umgEiDc*TN at IlSKP_%ku~xclDBayQC~hF-fwPuoOr-zfm=>!i9oCx&pt1oLOyk z=81l0`4JuEu_h^FCt%yljo7)#EBUn?)8${^{|bK1fH3Q+i2KKEER#`-w|vT+F-A3! 
z3Irw0ybUwMltF&ED`Dv$fc_~ia!Tmm1lCrrE>7-^3~`K$oGTG2QXGt4cSet7^>veU zFQ#TShO2P_{Dkql>KlFh+{(Q`W4uLiMU^!G{e}f<;;;A>D2$;IALV)*45Ak*#&Hbp zr!jnYMQjX%cu!SQ2W#5^qvB)Nzi{?I{00 z!)=mW2hG^yl}{%weeVCsP-gz3ub~`~dlkj_1Eu_OT{?%EChNokU{1Pty!ktc=aT+LduL at LqHV*pt}+LcN>v zJ;^;J?ons+SBlt*5ay+r)G0hoq6wa7Hmc)!LS8H(4HJ~t&DA8Meoy^6NVSoO_jPnp zQ&~k{KBn+Q^rGIn4?BC at m(k`^ewjTpR5ZT94>U6}Mj4;r)SEw2jlsApcAyvjODpCB zNhwM&&J0Z&@5it?klXwb at zepHdyt;{@fA4h^lIWQc?H+^09Q160c=-T9Wj7+>-m at I`7a`X+r9F(D8Ms`OP`fOTQW(%_c1=maR4vT zD at fBT_-J`IKq!{Yi{Vc<(4)(EnGbbA$6tDu$>n8odz9Ik1vo=Kv_WNcTqY5lNbOCG3XYiS6V2NIcY6;(L81R`!+n zQeTNrj_+%SuIw}QZcKG&16JsX6!5YRwrXv6pe4Msix+IcUU!^^y&f6juITS6ZA2G1 zCSV-gpN}}D==pMUQ zEUwM;ln=$?P!-!(*0QD_RAgLH?`0SGL#TFEVG=9)i{-}h0mLE!i?VxZ3o!v1|8gI}qpqvD~Bs-Qjh$ER95Nl?rOS zF5|LU+77AUq;i`PsQha=Fs-IfcwT0RfpnLrCP-S7wi7 at G8@*7K(crz at 7%}@V8LWGk zh2H#cv8^)Q!d75MI*IEF>r!_4bQI;o7NPW2D?RlW zN}uOXDMIN7RQhl#hM3zCxI|QDS>3MPLfO-pgGoRF3gye;S?(MXe&2*a5XGE)yYR%_ zRKrU9K;fxd;L%$kp{gL9BJ7!lX4-;2XJKTxyTar_8}ad&hZ()*kKN at 4zY)0M{!-4+ z;DNBis at It!JTVBKm6`p-94SS3%E)U?(QBR4klm}w{g!i7S;Ifn8Af5*Li%J2zi#R7 z&|fXoA>pa@@POi=vF+jAC-s?AEH_mjFl97U;!z{G8^WLQ0rU%{ZOFr+>i~DgZlQD+ zA39TD9{s&mL`VF$QJf}hGI`}-dVJex2+a4?J1iVerNVFc3TjktO4fczDjx)Iu4>__ zev~CJLXot{u;}1?kCrktGT)l1Cn=DgDm<|l-%3`dqPmUn)J~o&wJ5{Ga#KlZM{HL` zb90>9P7T|)xGKS&QRQ}37nw!#=WJ)V)4->_PH4E~R2C zx5INuCm#`X=Acz?Mz3;_;gBIwbiFS;@ek|X28-MOOE(n6r%NmTXw zQ<@fDM%P_SoGX3o9%muCKU?A)A!CvcbUgIjK&e^aeVk6L@~s zRA)%{8L#zfiB2l)V|C$g&uCg!OQbfssgg{wO@)R>ahqyRLt(Gw|EcjD%O5R;qKgL2 zePTNb!^X$$sN{xTHCCLf#)pYclk7OO58w at RCp7RbG#RfBS0M)`A1)M1_Zgyd^}0Sq ztMHGXwgqb2sI!3lR&UJ4=H=X(&UOmVR`<~iGZ6|2ycsZJv1T0R>o6Icfigf^U+v-U z<+Zrwo{Syg at x*jN7qj;F+Jn*gvcY_Xmz~D34==$VeYBo5#Elj%N;WG6vmQL7=dl(2 z5~jyCWbJ+UGmxOHy+UH^IMsvQ7{a>|$n9e!zlx0Yv0@^1*c{C3sj%TenhZp&-(2S| z?@!$wAmt1TekGDV#jo4VZ43xVI;1_^Vi%%~VXhRq1E$=X zU5Z!6+og~PR^&gPqxJsRbwtqY_p?r4$O8%A`$7-G(c`zfS&N at F00%q-C$hhn{Rzn- zG7_CF7t6 zgOMXQj)D+e2=xiO0KrFwN(Bw-uRkRRe>x*Kpx%JYIItUb6hhu3F^>&f0UcrG`J at Eq 
zCf72oouqltGPjbiMTUFjNlwG^p%wkSz_e&}Xq}vZ_(0Tlddl{%Js>GJw8j{HWq&k+ zA(}(_m>>QJ?LPLPjSqK)88-`Bsrd=p%%D@@l*o3HX7O>a`3KRw!)tEI?iV-?Btw-b zciba9l}}SNn(%7JWkTt1Y1ej$fs5d;6qpNt{ehA2_b#FAm)!XAl${5r{hw;0mc5d) z%o at 2(YYbcr|I^^VfUYCZ{r8(w!-xjoygR&xu<^+^9}EY`#YCV#{I{X~BqdAXJ9yoH z)a#zf>$X7M<~Iw&Q!roYj=*xsypMNuu_ at ZoS@1OJvcPO%Yl9c??xwA at dOgZ)BS!A_ z=OjCq4|iwZZWKzhAZNuuF9V{hw%q174#B~OS4cDOizQ(F!I#7Bi2cm_!pBG}xf*!r zFn8nHvzr(X_)Rg4MmKNX75<%(_vfClLQl=|;YC2~AdWn26-txop`TEC5=I%e&?rnT z8$u}56tK&k7NM*^)^uwa6%*fF6~3M(W|eK=THJ1#(Orf(Lv#Ti#Srl{43Dix>SXIc z9D)t+oO1}Uh6*#Sx7u=JYzTx3bL=gAGFE3R)k(v8_qpr*)X+zrlz_u(j33rl!swq@ z!$@O|0c#{3$7*)uo>+o_3I51zc$oqJy#QHc4eHl##{*9z;6o62px~_eq=fkSjAUY^ zUJN&j0(C!Y^0N244{h}Y?hO at Pqj^Zv>vL>sAPKiv_zgS`sv>5LFTwm{q^$SU-w?aLVYpJZcYx)Iwlzv-?y?WZ&31TR&`#i|38o2YYV>FRBObl0te%18KE zg9nx~S5X`O4wqR1T7KgINv)CS=ox at GlhYa^S#pKbw3t4CmsR=L4*mMZp}sY~TPg1O zi%HYQDU#b43EiJHs)ulZLuPWY3N1-cOP~ZZ#Qd?$aoXcAzV)4zC&~9V^NTrBK=ALu zn>jVSJ$fbQ8Xp_LF&FbIxtVk&r?1auQdIe*-m?d<tzh}JT0Q_%Ohpqyo<=?~h z;2pOgm2K&cyE&^-y}4Y!*>2K2e+N4E!(zr*FiW>vh~)Lk(4~Ux-G8X z=gYUNcfQ_wH at PY5x}T6|eOw+|d%04Eh)f%k+{@Um$S zUJjB`fm_&^nlF}oiJyy$oY}g(JX1#;A~&(Ry*3I*o_y?2NWK*84PF35bym5_AZ)bc zkHYeiTd4_IE$WZ&owpI<&uv7nzAEPcgGd#??7NPGvG)KYe`)*)FyzSz(SAa4>&(Y_ zKjj0)$j#AS4mv;h+kY1L-hF&^SKmNP97x;bzJZv2QXfaIkL%-ok45`v>h9y%^MhaU zv%vT61F>)*7n}o#iZJQckj!2=WoQTuAkY6UVS~$*f$s(2Q$^x_kh}d(tJMj z`?&dhOODQ`t$RL~pC5ef#GeDccOM6z1Bjgix$_)A>_4fG?76r;Ha!~cqrJP2PdA(w z{1-Vt2Yl~7jyngC1dhgkmDBffN%+ZU$xFYBTeIJm=xa8i$64}9{dpU4BKv1HqE}zV zbAY*pgL(2CU~c)zeckxmxW1k$*86%(kG{TLciu+y|GACm)z`V_05gPxdGXr5!3_Dy zeckzNTwjG~Ux)PQYxDN=HX`lkHlkNw?>`5abiPqGT+{bPN&iXv^vh at B_UUD7qx&?y zd!HWOc3$w;{w(mjvZ_it5h)8#SJlb~*P_Zy?|wgg4!}AIwspd}NwBQOY at ai`CvZ>M z{-9MpH9u%2-?ywf+7zAlVSCm6Cs3-4ajPgFCxd#~26wk_ckOw%^~L9CU&7CBpRl1( zEj<-&VsT$hjQ#VmM#;4!kdE3ex!RY9Njvlt$@N75b-Ds0q>L}Pe;sHaH19&*ymI%2)Vjk`#u=~c^okdK zoO=x*AFI3n%a3{fJI>|iWRX~M#11r%pB;oI@=#R4OHd0eRsD8EjEgSWhMCZ!?2d4Q z0NG)TzQ7=2`y~0KNqCgVkkEQ(Cf){y{^QK1-?yE)7WCo$y3>Zg$^)7Y(ojb26aID7 
zXAXO^wC?dhozv}J_P#fxPHI0PDTPK!?y{{I?PoAG+c7#%MHg!DHwoot05^*TPN&ae zub;M~B|?p~`#VYa*9ot=O=2I5+J|LTYp&snwLVu{Aj8kz_hr1_!ynAK<`*Lb;XGg! zSsb$8bs{hZs_T6#%68-D&s#D^s4nWoIONZ~7?IPu&@Q-I;mR+AMv(E_4gBcFJ9?bY-HN!y|$RoSh>q#b=A<3r>EPn at 6 zu7wo!$<%Y(3#$*O8n{*UoDiA)4NXO>>c~fGe)NNje=yJoG(+A3`0nN?0?PEVJ*dhV zMr}^svEg{7rKY6DL`2p^@{oH8;?=6M7UB0OTKl|P<1^3#B|gW2T8WJ|0nfXxZ5o*EvpSwM-rv1s#kq z0z41okCxK5V7bIy&PC5kn4h6-MuX;}#8+sAmK1=f3WFB^ZL z12$5GHO=w(<76ka3E8gj*14!N$)BH|gU>v_y83clMGdH&xM3<^P{R`V at 7-_n-=&-Q z at 2t1zm;0N#L3koAA+}` z|A%(~=xr$AR?>FjyegA-!B1pRUjrsjuKy<*z>R={Tj5E)p92$6nXXY(@=TiJZ;&3v zT5)4b_|%Qk;rP4wTn{_I_PJRntpW9)vkiuI^5jc_e>V!H7^0}&ItX9jqq}^R86E)v z=~|FUPJpC2t}ElHmF`>cs{1AWwd)`FHJ!e8kl at zG+OQRH&Z_th`1F{KV+mK+(n7WW zWd?Nr6f{M?!G!q|6Ew$xQX{Fz9vO}FN`eLySkmiMa~=Nz at 6XWt26*?mjt8wiml}8* z&M9s}&3`jvF!i=X{s2?5`)w)Jn2hEOnvdayr1KBYFm1+geizcf z7%pk5F+7t#?IJ=Ll2{EnXzkw1UtCAj_{vEX4U7C{U?u8>U3wgHvRRqD3>(Sz2CeEp zd4cJZbT1oxvmOAtyV8Ff#uBpH(={!C!OW$Cq}+50*l=b9QWp&+Ie8vHf?rh*G+|CL zCAc at MO5TJhbP)2Tq^7A~Ulz at Jx1RNR%xb55xmeZ at FzaE!Y3tP1TghB!z$OgSpgx9Xsg4DY zqJI6_^<(%G8OA!~w`n8{K^uJe zFC^|#cjAJ@%dz#&O3oDEOvG at cI=_ZrP=s&d_9kdv1k*(!yOgA(YjGQGEWvGM-BWC$ zOT&D!>YamnJw9i?T6>R?lJg|F$eCvdh!Wb08K?r?S3xE|zqiIqBe%rNUc2vgBty*s-?zUUe>7*ekC)?J(^y z$gLC-*DB4|U1XFUAWh2-_5<4sbWgyl2CK03pg!sE#CfSlX4e1HCTXCo{3NSJ~-Xs1#NTf ze!S44Xl?4Jcf+KlO03LAuI$Tr&C$14Xmsffx>yLfJ0OKT#hT;(wMH07)}c9WS!<*Y zNSedFmfWn^%d}vkd`g3rzmwmiF$Fu1-V40{;@%5dNgMa}F-hs^lMdUTyZae<|JzYgx$-d%o0CNBYNzxhsCq at mmG{ypsc*x8X5wEF>^u$e&sJa at _2G<>$@MbakereL& zp~XhMA=GoQq8r`_-VpaYsR%HsW(h;k2AmybxZTe#->GQy$NbBpiL|m}&Mhk0F z^o>^3HT1?^AU!&r91WE{hjLD3jTs at cdgW%ov58LA`L^yB)QC!YTJe-|!34MROCuVI z!JPkl3kga+_wTo3Oym%U{Z?k~hMbCoNf|<^3a=Aui~8qItP`HZ(A>pSCI;`(OE24k z`skC~dXwGeJ<%E>HYl(bIMw=!go(AQf51}X^-_}w=Kop*EnWwT?fawOg{=v?za|*R z at qX&%=3Ekg0(qdixYkx$?`8TL&`Z0b0~{UOt^x65dw|{|w*lyIKUA*jRmGx{Xr9_D zarVixGfwFvEdvXag^LDEnIyvLH~At3)%*_b_9}vzTKIu zY2Jdcn?3Wdz+AH-asi}m>`t2mX`dpj_RYP at da|0yD+b4*u 
z%AK*}_wpgLxu&|qBw}dITG8Bif7OGuEK6;p)&r5~a~Ae(Sy8>XC4o#UYT)EqDP~le zYuv7ywFw?&j;S+ZcAJ~!PODJXVnEI&uH!qZ!z0YQdB-nP*m?ThpOyY=C3ZFe9&+Kq z?P?aDEHQA^`^Y1#b`*_sg4Q56oYtynkkLKa&m80XF2wPyhVgtnHpa8N({PO4=4YIE z52I6-i1Iki$e(a+-e09ctIQm^iSlVGoqkcVaXH*LkONK?#1a&?LBt__x0 zEs{2QkaY=VeE|Nd2m29!F^whN4zI8y^7PspDh=~STSG~1NM1F-+ at K|015c4eS3}XD zj0Vj*2J1)~P2f`S%Ts3^+Mf0vwyY^qs4#Ysu>h$eMY3n at 59Qm{jD8#;6#&_qvz|I0 zOywGe3xKjp_p-(4_A(o(D`G?0iezq$?wY{zO@?5iYr=2fS1A1pB<7?oUWLyW7?BxE zn}IJIlWw3ND}QrLfgd-dUb%`KW#)y at 8yOIqB6}~G2_GD(JVTnD|5^v&FXYHZhMaWn zQ67;lvX3H8{C;F5<#QZbX(EkvH~}y_Iod$odpE07Q%|E4v!sW4R_nbhsYUnDkKX* zua!mp-Ei{DU5i%>@L&?zc2T*%pGUdN2pRK`PRF|@WRWO at J%WG@&OU-6qS}bgJ4GSC zlFwCwkJCNa{r9pOp|F|)C75I1&Cqye{StoI4XL85dG)2P9l{g+(fq~LAUtJ&2iJ}@ zBjIKB#TcLN`!n>V=(uag+GFsr#)kR#qKm=lHryK1E%-NquU^-VpxtF%1+@!*s%5p| zw;-SXzP>i{UH2Nr2B)j*f82k7#Qg0PWs5FYw2CDl`z)@gLfj9nn zSZ-8n4$Jbi(O5)%19Gn82`8rLnYw)?Qu>)(^5V5q-E*2eB~oI;lx|OneZ|kq(#WMJ zDTJn|FIu`4LZ5_QKwlq46RbGj)04R_l1swcVXQCDl!)Xf>OmC*PpBQ z7^?p%wVOqDQhMi>%Wr)Dg$7TGJ2WjB2)|eFMy$NuOMbGF;@prI)P!cIimXMtzpD73 zT^jmBSFCSA?lR3MxU1N-RIeggA_=0o1*41P7>iDb87)%5j?iqom|-aoO-m#nAminN zbOXOCNs_p4bqNqT9&U)r62etiDr=g2>`JO7pO`LFAl{~W$A(#g#cs-xqT_Y~Bao`O&2I?f*b z!h=pqM7h%#ybQ=JpE3&D0VMhn^~^PB2~Z`4`k at B)#aUI630S;ZMX_0Q$FFb29UmDk zxoVduRFZnX;AfU2bl2acp2#v7dd~wZ0+wBOk%DojR^5nlAH^EffA7_QkK(USNjh;O z3go?Pw_C}zxdsF>v??#qT^-|={-B6Q<&MEC`c)>O6se at zt?2_KcMcMT>#J&|=S1eN zw(#*(kC9)TD)bv|O_&V?gGG|7?A#Y1es7BV9=S>Xo*aiafnR?uE>C?d4~)dqcGLiH z{Ocxu<6-eBd3HdoE{ydaGDR8uSUm-LJqcu$92u_O6j$*B(TeS%l&BZKcz at G)m#bcK zOw*KU7e&(4A#p{oj1^^ukU3U31~($~g1b-XF5GrwcZbdi_to#BaG#GWx+PY$2i#u! 
z4fol}*wJwxVBF|Jn*ALe9cXYEe{`tN@~{^lxLJZ&(xD$6ouF&-10H4cS~td1WOR zU>9=%_Ty4ifL%z69N#mqg&gE+j`jB&4OH at M$h2=)T!dkVfcIk-uwv*p9logtEFX~8=B4PdS%_L9HH9$6Yb0XdTDV{>4sp)nCjbjSk ztTwBWDAZweXz7%VoISdWB>YhT*h9^W$ZwJE`w9` zkukst-chUH+2I~?3?$KLG at 8fffDi*0N^)ftUhklv*XQt3DY9e}9#*P6fhh0idC(9X zAy?XKc~E339{kpZ!Fgm+0us!XjB7~ ze522e>G=SqV7=9&?-sgx{z3I3PT9{o&@2uu+l=TI{v!0}pp09618{OXB^L1l8%beN zbFy{AX!9uM!Cc1Ng#S|WULchuzojuPvIxsFf-xwGpY{(WiNJdxZ z3P(m0)Ol?hG^luh#cTf1o3Sf+0;B3?rvyKcl- at 1W_(4T^iL-OC!p?3BP&*|!=4R|p#h z3mZhEt9ivx*xA&;^hk)_ zj1PmYxHOiY)jw|qvLDvI0`L;j`tXelr zW_Dm?kA?5xi9HrFQ9V-aS$+r?q;thJ)+x&GY3h-FxNTe2Dm)n2pK%uhsr<{+D91S9 znr0D7gD?e~($V2q9w`#-0hSxr-`#YY{g&_h2C+eNXGxXXmH0hIYk z%nW4x2sn3Iwf at 1udXm?*3-C$jAJABnn$W75+a9AJLqZuEkhfWf`%#Uh48w_UMT`Q2 z`W~GB>)13)$okeo*lp8nA?rWsAu(iq6(2qUCexyIX?RB`pR}%a0L3G0bkDRe6hoy_ z`F& zgdFt|ARnwnGqFoH>{3!8Jo7G%rY at z;r;lc6bLV(iI~j6^790)ZFl0Jr{4{_ryVaAw z*EIFuaU^$$t+Hmp;2jfDtClVoNx8TP at 7?dwjb!hrjml^qtU!58 at +J%aD{p198ul+vhfxFQA25eKv1iGKRkFMWCxBC22A8R<7#6 zgA$XYjlTLsp#Ez+R>M+y<~Z}a?i{OYCZmcL}>>G^u~#Y24XdllnA&#cI^Y_%ruBX>IV5 zGxNYOc at qu;w(D}T)}{>Vt(aMPB!4R(Xsd$RbEdZjE0AfmgkKmti&RfOyu?*bvD-i+7nY!A}^AY!#qS$A?h1Z~t^ z_{6MFLnXDlP%9MD=3*M|WtQ|SNJk}-B+W4$QspEhyT~hRIBRToth#u0OvO6+GgO`^ z!5a|yF1w*#OvMSTFTpP2Q(Z{_ex}uxpd*rLP+VbPZ3fYRGDp99;tT-D zJGg*8hqllNlxe|1fb%L54}@esC^Ra6^d!`P ziYBw}Fh*HEynf%@fM>~FBcps^Ad4jxATZXfImS}no#8DaoqZ6}-%a8N^WMv8 zmKdkhkraobsd$zSK^ahP9D&|XedRZ(Le`{~j)~6VA9(TTECw>jm*q(eH8uG#Z0Eo* zsHFtCLRUv%kURy$nn8|ZCu6gkJ%(!v`>3(q1bj>`!;&_6`2u$0h7dHz!!*?|A+`YG zp+$2nqFLFgKEvH#aGo9Z#qGEU;5R=On?cR_01mXu(R6LS0I=>|G~H8fN7b(z5l)8| zkZ8GZ<*sp;#MaDJEa^(GzGm8PBh7+~c_gN{Ph%xN+!k9iJVbqh`Uv at F>DGTi^iQf^ zWhcG~)mo=+Pfhu)lbitBSxW!fghv7LOv2wJf0vd1?#V2O%z)>Ek?y}_y z8{StJFW1>xXv+n;<-zRS^3^$Yo{0vxGQ$`d2yfYSo*aW4U0nDN;O&}E at nkbGnHXfz zf}lBv>`CW*@$X-w`^w~OPw)76krZL0CtV!5?X19hXt=@E^0qA?*F(gK-4 at gP8Js!j^-L25MfB+Bt_O-e8|+cZ+IhWm0bhuW{!Ny7)X~-nF0>A<#3c7Qs3Q2+(9Hw zln-ANGwyni%pn3;5n4aa?U5xpta+b8qGvY#rh!`(sIhmFEvbr}Ma_-+}?iw{)$ 
zlY?&3eIJ5n;RR5`P*1dmr`lutYmd`ls7z}8$7Xa}LV-7Tst2Cx-Y4PNJY;(}E>$ll z(!GNgmNG+7FTXfWezq>3{weabYWSe$JLTuav7lJX?(UD%YGYI}}aVk>{_DO_reXEop< zY(UmgAUqozIa&+V7`W2dN#b^#DPqMWXW7xLg~|-Va`?NPT_T_H&I1_>1S0^{V`Qy3f={k z*q7a&TdD>6K`mDV^YmWx8fYBUfmBU!1ZEIQcVLL~QcWmD2j^a8p3|sp4_o?DPrRL= zG-$omBIDfq^rWvdV>d(U4Z{fLd`X#>i&K(@a_fTpz^4mD=M_5Xoo5&d{33OC(7^>`Rx+z-P*#R{&E7V=S+TacWnwh at H$C93_UN*?9+zB;as9yD3oW;+40j zP(6O;*L_OeK+w__0W`lWQuJ#ykeCfLe7?L>L)3L4t8EpkjpkY;3vS3)r!ySb=?n0< zy^!sMZ#MHz`nEvLV5sM{b^-j)lR_)Co3f!&{P1f3k z(kBt;Oqmv(<<7p(7?fZ_|F#|Gf-s*tdkC%lG?cp9+ at X;R-Jt>tDl3 at Xq0u;aHyL|+ z2ZuGRe%C^3ML-)4<;u5{W$4ASF|Y%?Q0K^JI8J$)x`qv7pQwZM#HS40xC6H4C$N;& z4JYwNL?}}!^BiX;isva at UY>fWj+fa*-_SMyYRjbz=rMCMLJb)aWI7)2*;4|8yd|fp zEml7}9Z8Us$xqHT@<#ZT8;q!%h at aJ$pQdgv at G%@b==)&K%zJrQ_J<$g8XFgq>bqEJ zp$5FK-U{h{wD_g58*&j&YJ;7%_YnNJDj29%^aB0N3 zyJ&BcmT!e~n(=CBZB>hDSJ;8sq8d2lg{wJ+P zA4Nt0y%8AZMGM(z#>e9Gi*kpR+tsq^$sT3FuTVS7$orWIJ%auCXtmLx6WIn48vdw< z?TTJQjeUX6)rt6RlZN_qm)KBZ?$ve$k4el?HwpE+{eb038uLGOc)0|_ at l18-$zl_c zs(&jc$4=8N&~4r}_82CG9!K*GQhrFlK?qH^%~ULFfv$MCii{+*sWNQ>H+5fMsPBA(5538fEVQMyR?DWk@}i?}w?=|`#R2kORJ zGTj_RTHt?3Mtv_8(2QbTi23AgDWX>Aa~)U%Tx#l_pA z++|&RHJQp04Yi#kw|a%P;$65+GDWi1FL$SAs;`}*%^omeaGKA6Bn>1=S}ptvW$5q_ z&x-#TxY5BPVjiBLF-44U;1R-6*&2Y{4vd<0%{Cwugt8l<*ttXkx>exZY-<2m z&&y~k{;Lun=VNv3Beml+AHY;J?`5-XOtPrkz9hrIQOG5#mpw|k)q!FD`#Vl?ZhYY!O{e}C8o`DnK3FP3JxLu?l)XW)Djqh!!fLTI1Vn0 z4aeWUjd6%0E<(R1+hG>*!)L^kk|A+ARlm`I1klmI>T-b=4G5lFpBb+jsi1szddWo#u96qj at b-Ctv6jCPjTLuH1q@nPd z at WdtyajcqS9(?6)FyidoS<*){(Q-(bjv}m4^)pS~-NW zdUzW;sW9>jLrmDajWA!5Jo&rJ$p(EM-Ep5b;2k%aU at 7Aq&{py7Wb{FrJ6pzF1!l9NBe zkS_h2B4X#3|Hp}5&DE!pa45*S6pc;MjHrs>XpU#xSossQ6!0vX6XXghxLCN+XD`zP zT|bpZRIRI_H4+%YUpMmcNRub;x(uzklQ-ewP?rGV!40|ZJ`;;a7fKp at gKm=>vm>Ql zqHezc*J;uZvv5lkxw|b&tKgZkz=rz}zN(K5?k%dqxI?eC*oEjG=Od`+DiIaJ_DI#I%wOEzqZ)L-?z8xvrr zNEu(K2K4KB%MkG2F>dw-nBV8KwwcImyL}u1ebkg-b`;2>Y{`$k%Io7fb zNC^_3u}l0wTToQq&6A0y1RiaBlz9y zXP#+4zOwI-!G1zw{rnk6B-Z8KeFlBn7y1j1Vph*q9C{*}n{fgjs^Zu=2t-oC!l^k| zFA_ at 
sg5y6mCvlPP#+^nd0b2-bd_oYRWOPZ at nz{W8Pu9^gO(;hxr!VJ0I1C=831|o2 z97Gx5J zvLwrOcwSJyN7?v=`6H2~(2~gHSWCjo$dK`#69#{1d}JN~70OPb%uHKlYX3r+ZgSNN zPXy3WRzX!a6rZa~kne<4zHVUuZcESwz7h%q3YvT&X9EmhgAJ;M0bd0W-8oB~D+56s zyuU-Ktt6sU68o`_)nD~9#LzWzKEE4R&{&;y`0ENjm;mcSA6w<ozwO5B<=sRy0|JklG*4Vk2)xPxc6;iuv_v-PaazG!n#94FQM|HpB# z*4be|a$*$AX-zRKcLZ>1_(&X<`zRjEZBF0~L~j={5}HumKT(35y~qfwISFW0PYl&1 zz*{gNp at sf--1so-CnC3UWarBn-vim1Eob+C)7i+*U41sP^M$TNa<>q}Rw2HdsN*{f z_m>Q>U^OwP;&;n28Z$- at A4ce zdXATUgwZ;4Vi at gdi4DS~*Cl3b^A)H#vr{Cto1})j)CE;KImQyi;v%bIGRI_XW3ej&|1YrE_?#FPn{STCVvd~iVzHCGvDjmMVzI(_EY at YxvDj^W zVzHO9qgX6}W4FYtUu8>*WKuU6V>s+){5s3rjl=Rx>LYX!5xj{mWtzMhA4=vq&O&$j zSV^miA+fO&yOG$wUGYflz|uG*_QBF968mo-NbKMi9f`fpvxQMPNT=ms@{m+k7d`I!GU+I(y9`d8k*y#93w9Bl3KF7OuNr*G6>W6M5|`jo84$BoaZ(f&KZl5s+v3wvLMm;y(sUZ zE0aY_V at fMyU^txYJ;H(2!IdbofmT>~E8f=}pInQr__N-Mqf at aJ$DtKJimo3~;q3fF z(dXSCZw5JnIgARkK6a1}vyX6*UDiDs(Z;Nt8g0ys9*tQLYs?AJbt1SW{#LY>Urt8- z#l|^4?Dltv_KWR%rB~4^H6ScpS78U{wX`KL|CNN`ac^bRUtzZ|L^y!`C>F1>`OG_{ z_7-o}Ztuts0;F+7}dy at Wk(%+6+3M3P-7w4eP37LvWu9~H-$7|1$XQj;LYBjm2G;}hmRg=1yWD8~c zgW2kb4Y8xL8W2}*cCfdSOHcU=TK4B+=%l%7&Qe=2jb9&~oUa#ciPM3JY!@~RTnHE; zOW07|=C-F<07YxHx!GBP=Y$PrVPj+ULEFFvS7$IuQl at EX732~`x8^EInOC34rgivSt-_<|FojJ!jnAYS zAU|R;aMb|WWef~ubi0#@?s3ia$i52YA^5f?P_o)kuWjDHqZ=5}*%n^`ze*-2FJrvsPsk%LzsuGf&t(}6 z>PwJas{@kHjy*w|CY-}%#%~beE2d{1g`?G}FvcH;H}jx^~%IG at q*e(ju9^VAFnt z5n7tVM{U_v=s?Blc4Y*v!Uxyl(}6Lv%Yb~%9Op{qdgH&1BD7s{!^ry0^`h1sZTiyx zR`NTAdFZH%q8h{RGO1@&H`2~T-MoBD| z6zfD}nUur^DI{o)xxXL`rq+I~dR`8R+hMxcrFCh>@3Js`p>TMN-o(OYAkv|0+Nkxvz+x!V+>OrL?togNUJb{D z2fo&a96Uw${9UkjE0>E-uD=Gg*M_Li9Mv>sR+ at T6Bq~4J at pn8ePDy4|qR0WjUb*ND zEO85~i>%r-2D3cU5j#)>_3mwP+EV%C#ZO&m;BESCYh zqT4DU)d#EZ&@q_!MK at mrV=2*;o#QO^vm4QB$x<0++ zV*AP(SKjAJdgvCDVcC4Cpi@%Dt+)l(RlEG{2&uZuDlOIkts|iCpMkC>nl)4O{-d)C7|S}IOZ{`KV8jni zzsi-Ah)us7eM=}w$<*{3bQxXUVMVvmz}Ddj2rcgikM>BaTBO%Br at IN_zYz;GaA9m; zHbpn<%ht!f^j4p?`jiQOpKCJU2*K#gWLJOlE-oawC at awryjr=Tzbc at AOa49#j;1WT 
zzONR;P_*wE-ZJu}=FUpwTd)?rZE68wCQJMV@(#6Yj(OvO?yc{&lh8DV-w57G^GH3u zbZ%cghHC79t45`cHyX~u(fU0btcdqvJA4Djk#c7u?DJ*h^)r~j>cYtJpwIEs(2Fee zu)1ZA2_r%}Wi3}w7+zWww9jCvt#qmVy at 4x}XU<@jR>^8W>rB at kL2iWfwrMt_Q1%)f z*)SNBc$ZsIk)9GRk(8Oy!I^ahA)SG4Ws#Ahz(&m4G<}f%d=j~&qrrPHzvghkIAYa- zME;%y?-2=mgqfz)!6zw{x1x=?q!M1Qb{!*nCfN2>T+!`V3~Ef{RhYk648TbKGhKUxcXZw zxfw2K>gB$iS*q&XHk%O`#@orTTX1)|*|(T^(?A at c)t%_kcbl%K7eGC|BGS{#jo#VC z)^!j4(eFK3U%5jI)V3O%C~Wvt#2^}iXOFP?(4b}J;+ at 5%Evw)MhPWz&X2+p9`el&Q z_}N>zMI&me$sMMkwRmT4#3px`gHXVxmg4P3K?k%@k~NGg_~kpe;V`zM+5wO5PM$SO%uHU^kb-R9NmWJ|qb{qZ{-($}%jy|ldhsKh!_pSoUnVjhM>KjpJv zNOZgjQm)tJj)B7CZc at 3?h6s;OM7!962N#L*8JF<*74#M?6y+{>yV!)vwzn8Pt{UNq z!DMiqv at I3*C33vuX1;%)W{Lk6GaAwT(cMueWIY_yT!fxy&T>IEl6yNlhgTY*?62 at E zcbSB;^`yViktRI#H16x#>59|%lc9__FO;ASAgnQa%!kz1H=tgQzfzsKnXB26-I(Hk zTYW%JI7~#Dze)}13EOZBk&iiXGYtkXBO4BmjRzQv#GAazq$W6t zCwa|h at JBLtz~p+(UGnOc#*k-EU?@Cj!C~_1N(1_I^EarYHgMm_Qi1<%$=r$YQ)k6r zVWLWA&fk08Yht3auS5Y8)yB$xo_u^aJ})`4j$i5` zD{}7`qk-e62O5BgY5NX2wl>snLkbJ`>|!ok{0c7x31PGMLP<6F=4xIBXM9b6NuJTJ zEv%oa83wpROU>|frC~tb)T{7qy1RVoG}u3X#R76ozNE}(kK+A?rQ2E3%30*gGvK^i zD81UqH7Xa8_-!%Hkf4&KwZe;Doaq+sGU{{U#EHx;`i>kMv_{ zTl^@@=qlU4rU;{sYk@`h(-pW&Z1Wh5f!pxM#?`7c$KOX2>*A3Lj-h{cC`yN!#J0;{ z>qqj at e93%8HFq`YF^Cl&+ywS=j}6ZGQQX at 7el+~?Bx?0uZM#?AW^DmjHpe!>$H25?9UpkbfmZYQc~;2rupVyel#A2865kn?1IGM#oIwGu23n6pTlyf)Dg7ywK&oJq+}?4TKDFF!h^9ZNq)Wu%HbVvL;) z8N13p?=;A%zdfq9wO6f5!~h at ae_!Luz-Yy*=H7=jRb>K66GZv_h3a^ovH(+75TmMD zrvj(hJ}m`xY=Q;A0au{fbwLNLoYRJ2#wJvhQNKH^X+Ra6#$A{qDbv!W&@|Y at W>V=7 zO>>5(*&;(>S)Hz*=EMT^(`@)7g=Rvofh99jOoI)E?rVA8p(5TPF5^QtQzWWwpPE8N z**?_Y+M#|j2#&CV6b*KhT;MeDktwi8V+C{hZrjup;PUt2t4>WB>^WHUR(YzbQWnGcl}&{%(uK7YfdZoxM7{Be7LYD1ecw6fzPzLXoSFakef)kj_uYHmeRn5BDqx{_Z&sz)iv3G|I4{3)et%ngu~HPFvZbG at YR4@ZR=b=jC5zt0(EM1V zMVY`J{gkME<(ewDf$UIrtJ_&BE?RY?*P;Sn?LvtB;hw&{_ at kTI&&Z0ZOSZCWsmS0l zvnxI&g5|dyY>8ep-8RbBE-g`Mo7;(m+M(KbA6;1V at S`s(a5y|_oBl6 zOp%0$K>0Mg<)f(dW|tXiFWbrOK2ncUNke(vorvpOP{eo@`~4 at ni1EI6$svzDf7)v# 
z(OafT^zblP4(B^iyBL+n6-ZsVzDE@*{%qiClpiFLLh2ykO)d3OHU`(Z%vAy#I<*qu z&^v{O3f55c^H2$BQzZ z{*6%da#ArA)trolFBHj}Dp~|zAq<<^B>2Xg`R{S<`Cr=apS9oLX}=}$^jpyvzYHJr zc1;(aM<2g}507JSmrd|pi!@u;Si!fAh-!!68%jCj1m6HkOp<1MyNU$gc6z!;y3pJ8 zfZ&U#rxL-3N*Tdnv50Boo!)xX1eLB4W!OFy`=!f;4e;T0-zCqx;B}y!(PZ9`Q at f)p z*iNoNCTC3yevkKX@(>@;sln}N(9^jw7rFAUW-mV2g#YU)T%@x?B z+to%ObNWhV^oIgiyPgA zi-toYtijt4+}bvZ_~Gy!d7?P>_Ii7kE7umd{aB?v%dJeZ(cLhX*&WG>aqhRoF%R?l)>~@Fe%Z^7mYFs zS4qm;cA6xtWgz;ro&!m%l;9RN+|s2ChBYy!226EVR6k*McD7>Y z%uv54iC1fK$ki}6Pnc~aHBpjn-tRgcmQxO4!YMnT)nfEJ8h8^6#m?42O_yy#@3=OM z1Gl}RX-OOg$I8SAf7MRdo}EWsa&=%#uD7dTvB_qtM|WbIl^rg!QVUdIEje|fTIWa8 ziwXti(@1jv#4<}RH(~^M6$?PH)-qeUis6D553>a}Od~D- z?Q96Tdh$D#i^LGft#YRlZ`<$}B6Q+KIkln>%4A^-h4rV!k`4e}qo%JJjA19-Wp-e7 z(Z*eNWi4>Y^;(1rpi_3*ku^}8ZStA&A5fD(8U(|Irn#xEK1fqNPBly^BUL?W>_u96 z7M0%_Szcxt;W5(`;5`AxwKz;C1dfNjU~<+}q2d-~z;Md0#R)RcOTaZZ0_LS-qftu3 zha4IUWaX80gk4_~~UbGUO-;Z|-km1!cAspcq?DOeCr z=|=cFQ_61_&WTK4Ww+qv)3M2C^iR|qB*5}GPor4-XI&tfJj*Z zD)nRJD;S&D&Q?W$k4z4vl;PU}yV#5%!CUx?nUW3uUIzQ8>~xNFDf}HF^@G2;LPZv@ z!5kc%hs4+#~D603-HfW{4*KM~=;23n3x-yD^&dKw zH&Ha10|F6TRx2Y8!o@{K1;TZbh#z0TSq%HLoMH$_V z2C;i!%YS-|uC=zapJ35*b-3O_#mulBL_S&myU`}976Y*aQZM`!Cg`o6P~e+$!iWJU7P#MpwxX6Hi4$TBi)M%=$AM}2?k18 at v{R_kl=co=hF3~s-`>xsJ6>&|tAkVh z7ha2gbx0*Qr5#A7Ts8fU{>Z_n{)J~AWi4(pXL;!+JUO$D8`L`GdG7q=jYGWzyqcx^ zS+WD!mVu(Ky;-Kh7|+d}whSaor5ogeoF~%Y1R25pjdo4)q%^zS0Qh-#uEAhw at CfqQ z3}5vVg17CAe{6=5;2ocli5CF!_%v^c*??;RB?lo_*CF1tx4&K8l=nAdiZ>nG6?Tb1^`G5%;G>^XcO!zGz)f5dS at Zl(f<%sn^6R%b-_7mC~{n9 z8)C9EM#0|0=c>2<3MxcS)9y^uD5%?A5d)aOx_U5T9Q*oCYVBtBMi0i7qgbAIg=Rk_ zzo8EX>v^oGkS|yscfv1*>fI#D``C{s;ob2WVo7^P&C|f~-}Bed%^yuOATX3scGLjP z%5I`EQhqy>r*_mBrGfsZ%pEmpg3nKAY;2nMV2|iG`NwC#nt?KJ+b`b67+5ubC*jXE zOqikzjHEuyI3+ABN=Ccx*SX6L>{MK}0fj~+HS-0K9M8d(ne%X(7#L%OkH}-Q)JFeU zbH$Ox1MTugc>VIL0jf*6%iEL*EePI>D78nGF7x*ENEf3|2w19t^ppZ*V>K4LsYo=X z)S`uY>SPmOP at 6eZx-hRr-ewPsiD7S|mKOzR`h|QKj8ChwXXREJl>uLH1<1)jqHcqa zv-d{er!8lI0Rl*26&cCne8{uITQc+hbLdVx<)frywokMqd8WGy%%0JroV*UjUhHo4 
zP30$5dX%n>M^P~>n%xcDRu^p#1?DP5tT4!J+VAoY!bp7Rl#RQ>U|D;)gt!)Kk&{vR z!k-+iJ`%c5Mwv_*8p(xZUc!0`>@GZsF=xF84|szKaEv)NQvi7Gy9v%y)k*Pyjl!xl z^3!fq5?H1MhNGF(0J1}NKs-F&)9XV9YiH?i9~0$nH38Kf|B6D=R>Y4f!m37g-&1t? z`t3P1!p|WyD&sPvGA=WE>lb6l(J{%4T!BZ at Tz>Z&w9=OkVO)`ks6V;@71v3Y)Zkco z9~uKXe5HOzB}tG9QTa22-TY_$$h_)Bez at 0@>+vS(Tf{nm6s(?w?6pn`K^kX7i at wyi zZiq1;{uqKMa)zs<`Yf?fY?ESwW%!1s*oy;2NJKY&_ACbHZUd z0i$n+DH+KyO1RvLtr>t&`=6jG_!{pdH?uE3HyGTe?=Y-+7OUn(zYwQ at jwvPvuZQ|S z8fME+$r36)Per|ISC&!mEl1-r*boy*wFDY7wQ_P+e&w_>NMbm|VrVHexCs7R;eW2R z>Uia}751uc?#bIYMc!32BfjgN~!nMaL&ehPu(I- z@}6=EzPAyXopK32nG&~2h2B#$rE91W-cz%sB=4#D(xBibl9DVk&h{ib)s3c;F27@$ zGQa>Ww8tD$QtgI?S^18bALz9z;7zT*4#77aRw}Z7fWr%M9Zj at XJyM1vX!kV<6)(|7 zbF2a8mhn)Ske01o$QLV%s3xpmS!t_s7=f=C;0(B9$_hq$QeAS8ih~m8Ec)`2>pW&C z2`(VELi)OJmbAChCi81CiagB&tHbu%sw>M!uANhA4p!)0}R%`QOfq*wXyX4(BfO* z?=0yuY`ah~D3)YCBVk5-o04z|!Wy;9?ZJ*DbPzDzpt+Fqsw0GoE8`3X7$|FS82Q<5 zU9XjR$i?mg`5S9xz7pqEb39k#Ql5znzMTfcDQ~ssd~Y{(u5bQrFlWp>s at DlWN5>;;mrrZ^9r&%itO=7^|6HVVBH at 5)T!IkJWlUz}J)=f9W zzCVz&k*)YeN&h0 at ONn;mUtsA0I;iCCOO4b=YgMjwR4438T1>+CU!t=7WS?~?*i&ZN7ON_OG8VZNqPI}h%IpV(9?h`YXQp`gM^|gR4huQwKxZ< zoT|xL1-=%^WS6`0r?m;b8_>&w>~7Tr8^Oy at B9D;cL2Gj870n-Y%@gO4+mycAj4kL2 zwU&ndu3n%LS&uC+9o0vlcmM3spV7%jAE_p794_vP9DVa#8vW&Kc;)T1tD#ep;FNa( zKe%F*b^(W at 08@kB1MT at h2zlTHnytmqwpYgH3G0b4N|jF6s(={>72O zJi)h(=XnI*r#x?g;7h_F--ym`qzd40iOsS!2o(YN7ETQ9R9noRN}MD><(uLWakv at M z4#6+rLH4W&eVuv at gWn-cXm^>~Y-NR6_j$}t^-Fstu*4Vwz}*h?WF~v(S`>*CW=-S0 z5h|ABHvDv&+w^U{<5Xr4Rj43cIKuOY!*@iQX{$=fD(jvm-4B`eQNej}_Npc&-chyH zUKMn}^A!$Z)h=lIRyWWO@=ou;1ZNF1IbxKSLvGW~G3Lo}?Ep6SYXGSM&>DnkBbchj zmP5S!x+BUEirrAf;4c&dP%j81Bnqbq6(!{KepH>{yBK)k;LEi0ay$caj^a%4aPlE9 zQB`|!%17ep%=D_co=J97&?#@hi5|%sPK1^fioQ at m4nlYwJ zfPVLcRpHgg*OE<`a9lCLq6B;c=km1WHo8cmmJA at VAhab8+Jfc$6&faeeEZnsjgA}z zM}&OtnCNvnuUY!9A$2u=ShpMa2`t%hgL(RS)x?E>fm4O&cGGE@>B+&pB~&yJ0n{Mx z0UFliJ(M7+P^B8il{?5WIiWrWo-A7lr{@B|VIDWUXBE8hh#zl6>~1cBpWO zGyic_t#rJ;2E)`MrT`@N8dB(7~r@%28k~| 
z06~?wy3iw>>`qf1(PoJ}$7$M+ZwB|FG?I_ml~ehcUJx47k3I?^C$g0S%F5Cz_`16_ zk)4R|C( zQJ0D}GnU25+pNO+1MI at tI69tX@+t2R2cX>h3$cHva2W)~H4}LUq?`-Ij3(foJj?h> zinHhZDBVE&4=Ft)5WYGZ+Va6!=z>>Un5n?5V9bG)+TcMAFlJW4hd#8#&Tp{J;4$R= zrL3vAyZ0KRceiu{?rbTScJ+*h^+fTGt274t4z@ zI47s_p4YUYN3*GH091!xmQP{te>f010EDUn(oM}qKvyvOQ}w>HP~Yvm at 2%~6-^Z(G z^?mM#)OY+0^<9eZ{06I&ITpOdCfKPzD>_4`=}_1PbFi^W9IXl<8I}VYL??NUWx#%G zO2(H; zJMgjtXbm8@=nkn>zeC!Hk#@8 at q^ENE9a1xQyH^%=2^iH=X2T012 at X=ki(GnBzDyUHJX*%3Hy@{+ExpQQpz{;wrBE-tx&?%At6KBUFOD9O#rD9O!h zQIeZmG|A0vA<50rXvxiOA<50r|0KC_|3}G<`!yHf!8x|7y(qc4z*aVDl9UL4b=eKj z`NQ$b+U|b at mg<>DrQgF}lHHuqWH-e`(UR=uNA~mCvYTIK^pf4oM%m3Hja+t9OtPD` zy=6D(pnzJ%1vpKJO?NLGuMTH)!DDa1Wb#P2AHuF%U+*JHx-b`lQVlt|AdK=l)^cV at ca8=i>5p*#5V zLyI4Wzq6zw92}vdFcv8a5~fh%ibNR-58J&5Mh%6+}re@*=2$|~wD0H_QJ09NP%fGkaj^8yzup#We< zH?kUy{QE57K|&aB!GJWMN368Amssh&uvlqn_iu`oD!DWz+?ZE2Sx~GdQDRjOXxC`y zXd?6p$@SlN&!N@$FN5B_W0CR=0Mo@)P!^Mu^AoUxR9@!LwD5 at Oh0tpUYVlHmGYR;kkhKB zKiq+HHRUyEWwRzv3R+In_mWJ<@a6|TCZg`lbElCXt2!3!BuUYkIfCyn#}@Mh-&Z`( zBlr&Syaj^qVv-&KLLC8T8 at gGkSbaXZ(J7L*uL+X8+w(4bj!TM;xlAp9x9 at WyHRDNj zs+B`>3P3{AioK>Zk*ruY$v3H}qgzl>F#;r2OdU+^_kO^BY2>ye3E0 zQF2GaMM>@F79|DIQLH0BN$_2D28RV|4f=FB(lMNlC^^zT8c)KwBu9c3q!T)Mz2rzo zG&$0D5ptv=K3o6b>xb~ACPxxUj`RhWBLT)IY=1gJjnF-m56eqcV%MLr at h8KJlJxY zVRpjOqhe at 9!*bGz9myeF@{DRa&`s`~HhYznHCZ3xbU;s{Tw}{=MCDW*o27&{fqbx; zs9rYzcyEbP4?o6Bq_Mc^A|*;&8fD1yS?|d6WNNA`K z>m at nUVUjaZUyBj3qEy7x>L=j{5<%lTF>*(Tlm=}y2S;~m-%-}vE6u^HwMQ2c^hkWd z!bG70K at DU({H at A*Gy z)aQSYGJSe?sO#sDGOa8MOPQK}L&{XdrA%7i+d_TUq)eCprj#k=P3k+BGP!z5nGi>! 
zl&OaL)2T0~6m(PoCo48MsQjue94yS268y&CQOGKV-yr35AoQyAz%PiF`si?t+|~vqM*Yfe8YWbn0D1x#J)ql_ z4rG#%B|eCfqMzC|NzqPCQiNM(z#D+-Y9RyA+~RoH0f~yqUYW=qFAMqW2);{lBo!k@ z@^=pX2`PAD-~v#_ZY{&9JIuiy@=qbD?PbB=qco}Vvf!+cC<<|PBYWphnkdTIfi%xo zbSgUJVDP36uKwk at f}L2R3jhv1h9f?Mkvu9yq#-e+Cp143u8-HW-GiVqftbk&08 zNHjDpm|+<*5WX^R8oGXwkE1UUS76vhkcFN_9#+v@{#Q6M^3C3ZM(IZQ_HgLQGR*C; z=bMdEGRi4Pu}K at F+(GcIlR)^3aFWPz z)tf?+vdoJNhCEnj?JoJpOBQC?VXz$Nup+NI_#N7LRkV19vNsp0>LvN?@kRRbD|v*A z!fNPx^OQ^eRt&`b*cT%$pN4t+VJIp at lIYeAqi3s#CA137K&g5 zV5+}H{7RIao8Pwx6-aGXCFfO_btefgSL0P9eBfjnDQ&jdybCX!Xpk_h`-QGbSJneu z3R{V0N8agBgbgcJoM_~ED4x(3FzfYVPMy=VQA7{u&%JNthUV5#EUKMM`XiZx0_TZk zYMnAzqZ8zv1j#0a0Fd*>SPuF0Hy5w!reesLVy at 9oJ29`N!Jd^JXP~My?^^qUcwV*P zc-*EDS`&Fp2HrR+L;g}tuVs7?F%E}(%9eA+v=HrHre_ri&&DBH zzA($hwPm!JAV`39t{t_LOqrVXszvFO4fR7SZw|G314 zs%F5eZz;uBt<0o at z$fv;)D^kTmb1}jsh at Xw5Oun%fbmZ6M$@1M=*%HorCo5rvOHvK zuqTkI9&Bh=u$xxa at E;Jx1%HasauvRkKkLKwhH2r$N=A2 at dHdb+ceLU*DdAHinV3 at m2 zykshd^r*o1?Q*?6r<&g?1XA<*&`j5$iIg4XnlvF=0HQx2yqdoKdp<3IK~xhO=<W#~1dHPk5)UutcHGhzl^~N z_6Vi^0XCR0v`9j`K>T0okEh4-?jG!6-T8ZXd+LAg;gRon58XZI at 8NLc**&yi5BnOm ze&GRYTa#+wyBQeraF4F(-J=v(ZARXeTKca=fEUY8u7K~(t&iN%xSQwXG*aLs+{aVk z3v$ic;2DGA^_<$gmK|MoxiQpYzPZnQhojJiUHTL5{|-JFd at Gi069X@&BD6zpfC0)) z;RA#)#e7`k6tg|2UQOS1m=IR7321n`rDw?+#jK_m9mW-nkLfn~H19Cr0h^^qc-llh z3}KAXtt@^T at a$CYsW{JgcKv+)+`AQ${*2(7ZuGvDNYyD#I4dIH;7n?QL$0Tg4U-P@ znfdrM*{+Fs`;~X$T*!tRf(uAW#b^29Wb8z24V?^DDcM%G$Z(P1yAD9uXOH8T2jiEU zAE!e&I2!VEU_GD)d;pI at B#c(~=Y&z^AR>(L*o?lzOAFB!A-f(M9-fDQQ1YrsWz6CA zGx2eq%6Ohr8S^ZoKTZNF<6%)g;F5oF)|^4FSDz$Bc)g1K8K{g5qB0yn4}JLwP#KuZ z9fkF^MWh$=G#U{H=SHMUc^dDZg7-wEZ{z8^Fg+$BeFINli0R=G>5F+f6YIPnA{|fZ zylQG)sb!P!Vzqu}OJ5RJ?_~#H!~-74w)j%-srx-~G$h=a-o15Eu%G at HuqF-G|3AP^ z`z=_P+~g2e8J%j4EnwZ??HV{g*5<9w6jmjPYHh$qIVNvSCdOimvs-GU9rm0Wn_T^y zK46cEVJEN`rc=Z}Pd3{zS0_@}KL}i(7Ry+7w~GF(mnaXTmxGSYw8d4$yCwatA zM=-9;QSy$|;>g=yHY(E*c*5vF6U2QkIp{GvO$W*>nNnAo@#CRm{4?W)RPJ6e5%$`#g~=%ZYTPlGYaU@}t% 
zYj7#h8bRqJtotElUTj{A5)02M(0%HAMG|uB=(ukcG)^&vh8B(`X_`@s;fhrKO>gmN zhp&?DV4W!pT4ot7RG>mb_}%nEdY7nnIPw1Y24|Y#y*T*z1&SL$h7pKa1DPKNF#||n zWjKIbKp+Vk$mBP83(|QD5&$eMvK0oQ;v|IwbY;zS$Xnoigi{!u!j+*A^W^LN?H|yp z);Rj at eMZA2x0uNlmG^R>R!Y(TO=-5lf6HM1qZwM| zk#|Hi*0+p4nU02d818_aA&;)qm|U>^w;r4x&JXZ7e{51E#%TTj0Y8a9P7Z7rx}Xg&O{HJA{?s=)dsTMUW*>H znEYoFT8Jg^H|{p9%AZ1xY^R{PrRL7*`+YR^kLOV&#rw}<8$~Jh%57x{A+g4z+>drRa z?Da3gA9(pst#I_Y^QztGDGlgAv{=n%b2kvFgJs&w at QNMjNEgn}ENpbIXnBetYyeVh~35VITIPU6h02K6JBK>cf8M!=1DBK8&5p`*4*; zziS*Yb!EE?cg4==uBf}t-8vT_p^7+u*1`<*9(X)gBm|jU&)4`lmIs6jNyK?t578_xO8x zzaMu4XB~$DH4eIDx9E}`R`PG-vK%CbntOzCHK at 1L;9z_jJ#Fy6+3#qzsk<;NM2D1D zQN7?2)!n+lj29`D8A=+Qxy>+ at B^$X7bgbN<^eLFuI;L{!<*M*zU0`evI at g}a9iq+C z9ijok(au}42hZk{5jh-dfmmk00zULMA=(gYxWzVqhj=1#QGS*+c{#rK`8nPr+9($m zyPIOF1Fjf at NBHR9sLMYchHnlzY<|L2Z!&nhdOVrT^gG>jHkozI3hRH}%}IFJ^7lsc z4X<_C5;bIUe*PHuPITwoj at pNEx4WWA@cl$LUe%s-q(layYXTCrqZrUY+HQici4t(K zqkPS2X at 1Xk5<+4TtoHH59288MiDpQ#V#D at 8g#WIa#p`CjXQFCg}IPk5ZqzqS(Rz!zRS{Xeg* z9go3##pil2T9>k$d!}f8Nbk_by8s>ZL+P9?V}+M{ky-%+6snJ|h>?=LjhTL1Mi5OX zyp2Xkax?1$*7!6=92R98EEgj>YcQcxS$P~BTz93(jqIO5JNU<$@n}VlJS2VmBP2S; zWjyVKgSQWqNpF``!3Vq zZ}h8j7=dqRj~8z!hH^DE3aQ+6q`ff$P}-(kg8r#WldVqKZP)uGm#&SvBd%%cs z6pytauS?^+^l|uj9FcFt|6txc%3Go5og+>{F2)J&cH|uvy(eP at -z181deS8LoM@uc z9V7VCk!|Tt7knv{7{HAWktZO(Ro-ran-+Py%~Bs=Or%6_cd}5Ch_?)nV&qC-pK$qi zk-&aP=k3t)g4dSg4cIj>1RvgEG_ZSKL!XqstmU*y0C=+zeM!WaT>v3Kjg2kYtz7{TU6yV9aTl1HpR at Ko^XXz6zoPS(xz;-bN4NYK#=;Rnt8; zc}GwD2GPRxTGXc0r_ohu`j+h!k<(-L$6X8M$^f>wgaRU{={DFqeiIMFD{uD(p+&C~ z%NHxn_68HBo4vt!sSj{s_u&G{404BvsV!gQiu|>iFGlJ}C6#Pb)34q^8{|U(Q62}_ zRRr4 at hLws5E}p}^ig339+%p8XJOVDC;J#@lxQ7t#a)7&y;1)!{r4igJ4tEU{!^lW) zB6oM#1KdHKGQz7Gr7W*%l7 at QKROuqGDlCQ(>$^COO at 2_v!L~az|My|~91fI;A at A9G z4Bb%JpiuZX*w4uR6z6rwJG?)}@Ea6JNDty4H~uN|{%DdWd4G(RuJ-;IFAY#PN!PQz z59pt+za1DEt1B?Aj~0`#&c

      })>o4o|2&?pNT%}E}zyoHtd{vf$F-+R_)7SRGPssaO zv6_Bz8%5`I;w3j;Jf0?6#cx1D^$r0^N{i;uFz9vsjv4`vXtUI6)Pp6 ztT|h`$|bKy)ZikAD&D_g+5nKAes*~k;*o}D at Gp)6S5+y-!UDk8w;2uc4m^G3xFe7@ zHClEo8ZMS>qKG5UP}jZF8_fo_7I>jdIrU2bQg*Vt=aA#sENB^G%RBz0svD3Cw;b at K z%CD at X9RRef5+`qlAIky2CeY+WXfl^Rxct;!%K<^!x_!vN^r~YjpuICk7al(L%J_2OXhWo=Y%if!SY*3J52h1SsQe?uYk`q0rOSl=DVbE?wFjbs#5m)O*q>R3sUBuwtv7T+NArg<00TBP8O$@4os9 z-kdOK&QxgsX2f<+dKO$Vo2YC4ic{Xk=DdPP9l)fifhk5*RzCD14h_Y$bGqb49h9ql zQ?xltsBUSfv*d)P!oH2Ic#(@3co=$&EM!KKX<)2Im_EyUledKpfP(`a)WM^EEpcz1 zE#OeO6JS?KBYP&5M+Xj7(_%s)p0b7$r(t*L>^m21tuIhcZrSCTj4}pZDDSq(Th#QH zCX~E%L1V9gYUEwah|Q!nO6hir at vK3vXreFv5xqoDH*qh~gkDVcpL| zC|0?=+m122;k`Lc{A%Yyv|B(IIi}zwqLuuq-|acaIuFHHBc3$%&&P4qe!htgk-%~C z%dMvWa}!^!mW{&Gf6xe=oUdJh60_5L+AKU>&et2u_yetWr{&9e6Da7qXsH#2G0o`7 z9uD19gn9V|vZL}05CeB+ik97T$6 at Ybo-2*Qh at v=cfkN01=*tvNjMU>OcHOF`OZXg^ z0L at QO(;wo6>=+hq46W%%ZbOgdrpeV2mjrIjV1Z?PY2cQ-gxB(qV-ykoPxu1_T3ldk zh7ylM9vxTutoS4_a7USH;{k?Bj^XevYC`1O2G8T^`H?0XG8h)&=|9m>bvbiRy8 at 5F z at STC-L!J$FuJ*p94d2FjQ_eAbeGz8LGE&RHh?%J16B53dya00AEgg^KZlmZsLkZ*+;U*mb;&VUdV7ZOa`o}L zEv^zd+p5n5!%uSMQW>c*T=1j0**xQey3rHQ?mq??A*zl* z>PN&s{ZMZ(Xhq0_$h9yyX|BN2gf5Z&0KVfVF;GR%z<0#i0f5+!^Tb{U_JKV!i_|VT zXC(NZ?L`C?6aa-7`an$@Qa(S0o`LL%g>n-N1xZZqz=H;c7ri+89B3co?TYiHy8KSF zEnq!OF4CMtOCzy$Ieq6nVsQK)eyYPqd zg69l|YWXvyAUr~Y7k|A-P2hb>dBHL$3}qws3qD8LFJN{G&qfih@??3zOA&b(3qdI@ zF9<~BVZ>VHj`D)%Bl1vtRk^9W;KhhM3`wmFFE98sCY*=P*pva~1)o5k+^`NCA=qpE zR3^WKf5{KCO>Op5Pn0AaDA^ecIijD&CHPej>Ajtc9$15G%F^gbkQ;-U1Qxl+^VEXT zPaZKEJaO#QgNQSRxQ^h~jT?bTg*;2QmRU0ACze?ZQXUX>SC&~W^(5w1SE#U^a5P|$2E3^m^(+d+rkafOg&zXr!m+65S8-CqA2+(jNRkE^lp()|G`4>tzI|F& z)iTTIz#OA=wOI0VC?q2*tO^+yOY_J*|1M-mll)}hGM?fWm(+_U7!8AGHB>qnKLGVT zdkQEI at 352_;xVr$8T17M6#UxzdZNa^>jWj&dsPL}9-2VoNH z$EgMdonz;9;KiBmTWAGdqs5h356}U@(sRCo*@+-QXere)>xK*sfTW)l5q$tI&dkr?+7Y=)h7IYe0{=HbUO at 9#z^D1 zOr!}F9UgOGrZlcFQ5p*aa-*=mkySotpiT`!Nlz6nVCoybsC82z^?WG6JQ at SFE_gSg zbNY@!11EPSB;0p;v$6m({ybt+{QxF?T-*Yg+7>t#wszixI#nbZAO~<~V5Lt*7 at WKKA4HYd% 
z=S>w$P^-z?t%LT6WA8(WmbF&*(&ZzAF&h9Bm!s|}zsDJclK+oG? z%B$`EA at 4z2G3g4tbjcs`p?V(KyH!n}wH|L~4)f0nzQ1!SwbWjaDPf$n7)V{_>CjU@ zp(1U*nA3{Cx#EsfKzQf1*aPE at z>h#yHs)ul=}Fay92hsW+sbxUgqV5BMm5mu_!_VZ zxFIndoTe?deFG8j$;gcSr4x_$Uqy`4-0%f9tzJo{r?p>%Hv2 at J@FKDb#tENEG10gW z($Gm7ckf%qkoww&>;8OT#OpbDL^en%VkhA9IQv^9(!m(BYWkA(novTpEIBn3~L=uzB0gqd>hrPYaPM(M;bHf&kD0>ti zH9Z}?Mr(G%4(%iIKP{e%fgeMq4=J63A7f!`zJ{ke>wkuwB(A55w=6 zY?XtMAgRC`ZmKD%a%x`{2$G4)AK}x>CWIoDd$*kZFXBEqX9sk*qb5NbRAn4f)jx4m zt>;crt${DzC3e=Jx`F5WW#RDBw+8wrmX;qC-Y>i zufroF6|21*B{%nuzT-P0xQ3isiwokY^aviq7mEc$PC|xsKU1u^I z4%{Sf`8e8;lW`WWW6$BUrQz`m$SEr<80`@_Kh8!_EmAxZ_ewLe2myP(F+!>laSd@! znt_~GU_c&RJlX81JG?T!9v!;WKF8sE2|o|9Qwd5V{*W4xsUB4gniw}TYY+JtS%*YG zlNV$7pea4|mWh_S#}n4jTKFTsv_^5lfifcdH=T99zf#ysA1R^_{GyR}AUC1|ga2~} zVny$zcrz*0JE4U)Qma|0)f=(Z{k)pr0WmN#2Ipq3j1)AvPwCHUT)IGMm5T&Q%Se at A zH@@1xjGOghZ&!-`JmtN!I#pwVPU$|W(QA7X_uAf{mAZ71>!d;wu at tUTm%!7NUbMd! zN8^L=Vht>ioojJFFjuAKg!!>EBt_%71D0|oR^#2z5i1pi9=UG$Al=!Xbjd;f;+du7 zx>~MtR~+Gu>c at 7B4-fAb(01ifr+*q9{z<4HmxNdLDVR#oqF%{$Fu08f5K}o6IrE*T zL)RSAbvQ^Lm(!|n(yHi?E*35Ik0)>paX~1m?{?O-n1-?sZ`s&qNc}XOR5Kz*bKWQ# zfL`1>s at L<2)@i+TNY3tm8*PN6{1b*y=b=%d&VO}lxbxNUDL7yJ7h~5e71Ii>e=0`_xrnYNBxfvaN3r==g)Q9NHqPu!}nqemerXDAmz=bfB--m-`~KY%{v z@?XPScvHo>2jS6z|1=0Z27Fp527LH}KnqDI$Mw9T3p(+x1WrJ?+vy*_0>4#cmleBw z$}S(b%ilshD+})KFZjlyQE^~aY+#yJE+ zBs#TQi?UfJL+5OtqxKi6)Y{}NNHi3&SBBF9XPDZQ6wq1vxY@{ ztkH0gXr77~lJRqwW at KtRj}ofj`yC}Z1m6QOIFT?_a$Tj-1U$*M(a3KsJSMxzq-Ktq ze#L4oK*V_7-i29n;)JC+F{qGg&zgg=yDasKugRa9^|*8;k`D9p2TGa3s at qj1Ni3-~ z?H*Gfn+eZGOY`DiQ1?FnZq at T|*@4b#@|}SIehl at C=*}N+2=bb2YcUfh9x(d^GygWO z8F)#)t3Lo+0XTmRh7xxMo=0YzqS&HpfIVO}x}gz3v=K>}5zk+L>U^(1*XwodRofc8 z1E&i1#PJG7p%fwTK3b)g!dACc-oM0Fc^~|L8^zhS%D3>R01)fq0vd|E&`=Dgq391o zVWW6Orro6uzt4d^*RX5R6EyWlb5&Uo-`Za$x2U4%SeRsVLs0kDf1bgxfJU}g8~0iya7g)JQu9x4}U_McPslp^t+wC zO~2dNtMDs-gV-W~sXAq{D3|lDR7<9I)hq_$>aI5$xO-dw>r~_~x6MF=yB4Ft|4|Ch!`2`r at GR|O6!2evxJQi$?U?VBAGSK>c_L~u={G?n(=54*bP>4Z 
zDRtVPfoK^Zm!Z;eMchyzu03W}IE94AayflYi%|lna9nU5vUajjCew6YU%y`UrS__C z`+$9&u-+*}Qq_B`Y}|aT&1> z9>XOP6v3ZNo4m~-Zv{HmCht+xU;Yre%l44rAE1m_GU5ul`UWi-c3>JHsvy$AycgjryGdwBtV(-X(;XV8$qgmA`fkbxAm+brg7 zWfR-5ov=7%Se!B}&cKYc(C&_iN38KGbI at Jh4#e6Mo$_8A9Ix4cBxGOK8d`O4 at XJ9g zwug=oC|nsjJs?dt<6M at JEpx>b4Arum&XGEx_sW2W*GzDVHNyqerJMs8C=IX=9Kzp{ zufWJXX2m_08 at 0zup9}3VgY21!mc>E0+-{do0>v4R2xm at g=d?L6{IEj?wa^NLje>jR z-|yodY18%y`es}FkUsfKT$Z-^MX)cj^nG#d`Ibn(>hMTqjAA+f>a)wDv&bw}$o%y0V$?jr11`^w8L;1+8e_}L`UJK#Q z?|Uf53Q#OC??vDIFY{ik&%4!zaap at q?x7wPCmiP!=J7Rc-mP|>Oo7~G92dE8f;Asj zJv75b+6+$&8Cd}d%H3-NaxLy1_a|Bx*ea}^I|fL`2kjaoM^_e?qMtMGj4o(}&1_qH zE!64=%xV_>Q`*TB5#E6B;%#uZp|o#8nj_%8mUdDYKyGE%@X`z at y)LRWxs~;WQYgv8 z%;Z)vFfKE)=HmybbfjLI9a*}ImtL=z=0=vT=A}18wicEoeDh6SawWZEjqc47UOHT_ zRg5fsfS2azr4u7d$3iLP76Hsv(KQd_?~Wjd;^@)@UV61&^UTQ7;5Sryjb54`S-P2* zj at 3)&M3%nKOYM5;g2>Y6d8s2*x|APyVSq4GJfTbM3tpij?0hDrp z+jM}&C;$fsxKRgajsnQy0QovVYZQQq0~F{0ZBYP+{!IX*bO06wQ1fqXB-{B&TB1jC zIj^Bmuc0fdh9w+ev<_e}_6E3*1Kgwon4mB0KXW)9rBj|#qQ)a4ApBu zE`XQu8m`}un9tpATWbOyoIoZ@2^{7ri%>k|p at AW8v2ln;G*Grd00a*9-M%hbWivk$B zPaD^nMp~iQM>n!(Z`91Jh^pah4sd07uSWrVzPC3DU%Dm=;7 at ydWAUZcQ2Py?A0Cu%%BgrCGs69NK?330g z)ZQLd!%MAEsJ$x+U~X#^YB$962DrU73bmV~0CG8iGmH$P0EE^k)SeLq at bjK1)Sejy zuys!qYR`@W_~)J|)Sep!Aaj5lI3kGAMl)h0f43(JwOgZVaPEmh?P3%__MRxzJ~0X) zVNVolFNy;Aem4PFbb#V0fQ`GOQ2Wd%fOmICq4qgZ0N&kEsC_{cz=OM^PvTqEP$tD1a|Gz>qKkiRq1X-{1fjhY?5= zz*D=!F~5Y#>i$ur!{4 z`R9pjz<-p;WhW?+%#QF3D?7{+BHPas6IlyS6tO0rC}!0>F_W$4iG20}Pt0L|<%tFC z4NAyJ=&@xyjWiwe at idZeYynRrrN(CSG!kd5n5U8cVv~6qNi8PwG*VTpkf*Cj{>Rfu zE3qt|MzV-y at HA3DEQP1rNcPXuNY}7mTPcmC3}ZZv)C&7IPa|Q%nt2*&5cVZcBe}uW z at HA2u?4LZ1LsqK5HN|S5$Cg+JdNm` zP339C`JJdG%hZQ*IeW~`d05pl6kcp7mOTfx(akJuYLjhKfm<7q at P%*WG+PuK#U zM%2M(^E6@!R?O3g4%lQ&d$kw}`rRnMgMK2 at fb4Wfc(Y5cnC7%iq8du6(+i7G-D zbuQ9JX=Fw1bp3h(UL$^f3ZXW$_FX-y(%RrH21o)niSpN?yp*Uu%SI`oU9Bvp^Z)H4 zV at QWX)1V98J(I at c4tsv0G@vS`Fn8evg#}CcT$NiHBdWFb{3Oqh$Lk6$o?{rapvr2r 
zRT`XXwXfQ9OgTg{WvMib!xBYH>Z0eNM{(@VU4%}|kTZ|x*m+JG9Z>|iqZDjkrFAq20JSH#YTW=F1UNJ=fZ-!Gdvg2eb{jrX7-%X5L^W# zumMq)W~j8sCU0Svm*^MEW}EzFu#JS6uW)DOjV`%?4Q}XBH{gYsyqzt05U;c7@;nJu zZRUIhkuop;dzCw2Dh?oimARu#q_eBbR9w_<4*a)y!}dFS|fd7jilllahkr$zsRE~ zx5E-Se)bZR%a15E%gB4lK;d z*IE&z*ZHRJ+=LJlX-nh#Rt at GOy^dW3ak~y7Y7i at Rh>tgg=-7*NsEKw|5!CoUy?mfv zetSguJiUCfUVcYJ`J_$Afnx3?4mO43u26ZN_CAN1Y+HM0 at A7_n{dej0Pl+f$-V`do zLodHGqI{=bK1naXE28}4CLE#kUh^;$CsQAx7YL$%FNiq+ at v{!`7(oo^1+k#lig7nZ z&$cfMw->Gn9VojgG at W6fN)Gfd9mv##mjTqn=epH&B{*)~JNrXrABBMX2 z;d5H}xd at +&!q3I{Tx at HI#~qD$-)w6jg#^U?ng0Rz6Tbg$3*mk`2lj3u+ at Bf7{VxCC z&cglk0QWx?iTkJV`*2e}k_4%-UNrHhYig-q=ze;`VlpN8_w@)8=YK{s--q8-{bQvA&XO8(-h-+T!|Qr9 z2OGa>6M*Wq47K8p(m5a>uV`6tGc1$QPT6e;#NPCJPVJ5^v4pwg at 7$)tk*b&i9;2mmaZN>alt#Poa$QJK(Hqun=krwVOyhUnd)4&& zU^n{5X8FfvMxK8mR~;vj6oC1hgbEC|!8ZD3n9;!g^%YU{KpLj+AI6O>-litfRoSRi21mF$BrpnpC! zz54~OZLJgk=us|^ek*+37O)yiElHAKH&vroQk%CcY3|R*>&RqkOI2T&Tmx%Cxgs#; zZCFf&ar5KKYWFHhfiW8(Z7Qqn7S?aFRi!yhO}7MGP0hX*&n_D`Bg<>4La!W!wuP~U z_biUT>Vd+6^JCRoB`DX{McBfN8ILZ84abDT89>C2@ zENeu)ovrfbK`rHnVyIpt)qCt&+5!K7TC934?^7+F^m0B|Fq;0iaiW4~verSf72{b_ z at F7NGk{qs*r9324L=BsZX|G(VW%5an at xP6e&OW#k#**IgjN%n5*YP#5!syACcZq at 2 zo?pW|PklMtV}K)Lif( z_So|~t>+K&>OJ>pRo>C7MVIjwjYc=_9Cc;#Pxxx;j@`Wp{7fI*hgd2Vm-0kAz|1DESLE(d6G(^9;hPpZU%dN5~;#dMvb>5)8Kt7!{8r3 zSSloCV*jN6 at IB8dP4ik!?*m`y9GiwDpCn)cprm;CZbIaDMcD+$36MI~W!<)gab?~2 zEH;+~A1Lb{IRAY(QoifeuH$!5xAZmvDK$LeDMrp`l+T*Sc>FOCQwAwCf}>H)VVHh=QrXYRlKZAx~J at BgER#Qkb{Gb#F)RcnOZpp zvNma1&rz0Vm^@x5h$d?odedT zzp#32W~tu+?~KhV^)HHrbPTDwqjmI}{${k+cNjcZy8>~)41x)_8n|nP#ct~UwP)F) zT%V|_Ti}DXwFv6Q*m{JW4ptCK7!AF3qYz*np1&MOP5+V?!b?fSFuqJ|r at Y+}u*I`M z_iETIWRO&ZBtkLwQbcZAf6M;M>FqMly%bHr`j;-NGLDK(@>~Vvh*NEZJkQT=r0#S#I!^Ep27YHM@;g(J-QrkSRYnKE?Za``7 at mSv8pGq1Ym^}Qk(?6YX&&_}tS2c~xn)SUZF^BK64vM0HyTTTD0f*`&m zh;5=jZthE1$w;48{wH4ktX_T(mG=*oKgG*kdifYC|JAFN-&=vosI5>?@DjIOzZAgZ zIJla=B?Om;_4wmb07yaMu<0f4WFzk6i-^&NoO;nn_M{KGEC-}%^vX(VL+U{+k=xbu z?5NmURZY|U9qA+J^I>q 
zX(Z6Ke8{9L)n-kbbSMB!)d2db=~rq1h>$sqoI2t~90WltNwbq`DVYxe2;Y5ZpbV=Z z?-N=D6C?clM9TxvhEz&KdBBiTyT32{cY0VL&=?mM2$b3j`bvouRsvZ5qsa2dmpc4? zm8~cf5LO#X$M|E7PT+G&D>S)45P{MVh&_`Y5)7;W#^e1hzhTds2>{n%%Pz#0U4Vsg zrT&2mBbf!HSV2S+hsB-KqIED at vYorQR;v+$8vW&-~a@^xnTZxzyWDS>2RXlZhtl zqPk5=q5&zj)JXNh>Ly`zbxw^kj#TO!=*ZHQ at 7eG>iLkzc4b)p{&7Ykm3ckrIj~QXr z9QTudV{dg26>~*qYlI?makS%P9B&d^?XVZ5Zuc1to&|h7KD#iFPMqb*kxO===2SkY<~As~uC;YPp(v4K9|HmzNs zU0&;Z-E3cLt##L0zy(pPXszN>_v%g4x)*TE{hl*(69VFU at Av)u{`md)e3-eJea_6B zIp at sGIWkG*8aGsDeO^u!b?g at mgfByt5HWwpNE@=Y02zAAeN8 zc#A$`=*zNYa#X6o@(=K>b8oz!z8zW$1LsnxlKd=Ds50Jz*`k1Jns#LkR zlx|Bqq11s;Dk@<>i`WMR6%>$WaZ{>7jddLou4-ILRSH^^s{G?V8LlZ#r7G554ax==P!*;<>L;0W at uWg?{PN|BUKH`=^C#oNWUZR6&R*8jDgiD)9i6h|K z*6488@!d+-kr;e&DSbFfrkNovrZ3kCMP6w!A?7CM$F=Yp_|l~a*vpmUT^c1iQhn^$ zOT}8`Ezc4S5r1Eb at Yvn1f*xpx-8W5L&2_{LiP<8 at A-E0!W>3^KMRmbG^7dDHe=yhG z-p3L06uLp^_nm|2&gm9CySbH-55Rx#$tce?SopC+3;uIB9%m6GlUf8>d7rHQ13KO) zSPGRgb7v#IXx%Lt2w?zS*&}(@lb;fRZNaxT`1ZoU|HN*UlSj0$00?GFnntGoDnMC} z7zM%8hq*0pYnG~ve9B-we`z+6#n#&HJQi0>6hbw05CBsMWgA`Zz1-2nSoS}miR%v> z?V*X*I3gCa#UqaRqrizHs6}MJ6Yi#|9)mNAmP$B-DS3mek;8-v7Iv`|Z^UDpm$(RWdV|1C%kn4^->B*5~Tn^%6qYX6*EYtoK{_v(hS}Blfqpu=LB>Owk zaI}dNp^3Nk>Pl$hul{aL9II!(ZNQEwg^?P%i=f_ypM+1YIh?Dvt4 at 7CucI#foX#5H zgVfJ%VeBUuyRUuS*p|BvbD*%;-)Tb1J`O0Eq3XcPAIoKV19d62K*0a;G0u}J%Z0wW zs%9_U!dd{lXEn1+qf}bX$J#pyPrI at Uq4yo4Q==PM^E&8hJaAaCu;z8D1nlu(H(d+C z!aYCNBEEXhL2L{xzz2{z;m&q^Y8M|#?@i6z=@3^5=Si3 at QdpRPcCv)}?MTub#$ygX z5f8~BYR at z@*C}k0vI8pa1SH2Q;nP;-RqQ11x1gnSoVpoW+DTO6Sz|+loe8ezwU14+ zzHOS;eU{P%!6bjhmGW_AE&HF#n(9_oaP9MQqAl-$To)6(r&KOu-mYvqsq0y*8*o5Z z9=M^D(3gDRKc#fFkS_2;nvUvE0jk(QER0H6S&y^p7QszN0aX%3;;0wBUq?W}6_fHFV4 zhQ6QmkvI+Lz=0;Z!!aeFI$F2vZDOwAk7&tyL4?zKf!m%-UrzIPnJuSai8NS#67_t2 zd5%ih`lZAY7*j0dDLG2Z5hoXDm at NlgRvAtb@|4kVp#{aTG|6+r76P77>4;%CqAv<< zo3z>EphB#gzY8JpLbobWY%!b?Z%C<>DidCMNR=eV(#qm#q0&Ak+0Gpo5(_vTT3}fc zBr|)lqRbv9KBp`Q)bSo=N}cGzY_75^(IjixozpuB13z0lO7N3 
zZt$mKD{NH7Pi(~@@t{zO1OKeZd6u4DarJTZ&*SJmh*klwzP3!Ws*@*Qmd*^prT2>8j|=y61FimdWHE3%-sibch{xV>-cKR zMQK+%nxt2}U4^+C$3+)&PYTJNu4YfZ(|)sAexJ?P2UWfu#qDQ4Zs9eWivBErO9SAF zMMtNFCX`QGqb<42l+>abPk#1#b_X7R=)`IQ)zU_$Y>hWw2GWF{ACr{dNzd%sGmhE( zs`7l3hs<(;u`5{qfG(v9HuXw7{D;+OhjQ=kQw=2YhOJUqoK$wDnG%`8)b7vwF? z4sBs|kBUHToH~+j&@72IGLX3)6JZy~$RI{BJnrQoj-0TXnjl4x68qvy&TLLQD*#%r_kmFFXgAm1!aI>`GfS zv=PDz$R)>OSc{fJu^7Sfw at XE)ypIQo)_5a$W)WbUm{%1F{75f4YH7}3N`5C0&!V=^ zEX~cj{d?XDV;Rj*m>ugKWNFd%zMvySX-W)D}dRJLn@@L^J&nf$W z^2a-lx5_l3Q|xgp(fEpSv08t-U9IPzMNkR_O%lMEk`ErhRBha=;=IzzpSF9HKkbB| zZ5j?J0EixJz``;REjF4`!R&+cBOuS$NT-i|C>bCur3<*RoR5+DabmN*8w?eAK_txE zm;&FPT7F_P`5g$D|%YX=+v*Z}D%9xkn2;e`ZJUo at cw>$4U(3}Bb6<0381F-0DVePyUf)P9U0 zV{0jvfaZ$BQP(U=`?aNZrUY$;LK(ir>PUKi1zALh+^2}N-N%FFAh!5Hx_La#64JaF zscBf$N)e6r6b)QiL+pg492V9`AvdO7qa(^c`3AnpuEeaLkdr6~3G6yYE+fEWc0JIg zG(zI_Vz7P at xp7#m;SUoH8NgK)82EjtBm+J*z;8%T_}7c|EdOxjRiwMPK2eo1P`0@% z6no-Dl!nVmIn#=X44gE)2hd{}|1F8Xkm(j*td1}%$ylKNU0BsUftx~L!9+p$XkrpW zXECMELf6Rn4a3n==K0{1i-2Kj+$-mT at JG1Q^n}6Q5kfsUl0;<}Uk?qj9 z%4rc5s5L864S3iw;RAXxL8TxU6Og)aH at FOwLeGr_cRn{1+)OSI+s}GXs78QB zI6^H4Pnu%ciaKHDUr4?p94q=eH+Qn{cOyq|?n$5e%QY&tmG~f|_`wjfilt~0*EDVm zD=>n0;Mi+yx$wgp at FVWb)@8bSOk;bIB)EsI(%7ari|NTy?1;n}VA}F%^5El;XH$cn zLH%0RfMQaL219{B|7WM at 5rQ?SKHogB(@?5 zjlL{2NK-K*h~-b>RnS_t_j;^qWaQHGE(| zbt%v{l`?AS^aS|dWkAwYE=@D~B&UYYY;GjDX6`9)3%Do1P2vVX=2Wh?W^@%`fa2zC zt`oRsE&$vD&Kuk$^FmUJ3QOrpN(m;VFr*Y(|NSJapA at CSqI!~|f=N*fDasd%;(*jU zs;=hnMxc at 9^Bz$>iy=PTahZk&X%%Wn0Km58=xNM zol5n7QU+gb4 at CRGqhXtSyP8Goa|ljY>OrpiLH`eUshWiUZ}C#QfuC=em--4K{xdIC zhAG%Z|*7P%7dCh)P%yW8Wd*4!a{)!YEWR}IuU`Kly;8LEayzRFmM zdd|zI3Ao+NC7jjG^5yF_{B at Z2@wmjxm#-2!zYpw-3+Ullj)aa`zQz_8g!gwsVwuu^Nt~3Qk#JJ^&m$*Q(~6V2B>%6RRHfXFldAb2 zIjL?fP4#p(l_{Am9Rub#jZz;LDr&r-mDKCbn`EYfbDo~adfCT)DSamwYdvy{rCnK z>e at Qj7vG~=9U at Ub2>r14F!0*d2Hsdkc*|*PXkS?MZSza81D9dx>{cX~g+Q|L8rNb5 z4c|-?8mxM`HvB%>?~P>$96U}QrG;Il>|~%!RvZ(`l-36#-7Tgv)_s9Qmtw3&4NPBL 
z4IT&DeF=m4HO3e>;xo8c11R4(Px8Gbzl%ThML+5%Nd7y%l)u~);KpCpp;FJ1G*5}Y zoaZ6&m&@c5e;Mx~@s|_HM*>qaoJ7Iy%qu95nLv2Vr#vJcGmh*Cm!bU{%knjHb09*3 zSn3!TKr#OfB_$<+sq!IQ&2LJ^vJ*amUQCLx{QzI3jb>jx at 2$7&Q`*A-C at IX}zJuKQ zlp2@=v81#jA}}dAL at WmK^sY at QS!I-+UFKqEP48*SXRkt57NsM$IU-6H&~Uh>?9)R; zr$+hLhdjZzX&`Dl#%TFN+KN3$r~TE8MBZg=_Y)8+)ZTWjrp=cCNkylI7TAUaU?J01 zk at 7_%Ang6eRLUgRK{or9)_ko6sE-p{DSKQGE35PaIWh))u)nr_^0sWSlLgL4MZ>^b zbs%ujP`JG3?+zDpf1S1D5K%P}6J++U!R}F$#N3dx#3F8b(Z_^wFPBS< zdtbT4xXZ~05HC9js}9V2rbA-h=gKAKeKeFon0Fgm5uj~)%C8lqySg{5ZRaE)S<5^7_BV58^*H;uO&}c~EreZlu8)%Q4`5oup5YWL_}D$l=_7lT zP};8DCj at GXTp=EpCzD5R at EYl2w^`**Xjds)LEF at 3UgLvQn;L2)hVE>W9uPYCw}3oD zX(RU?Euu*IAD4y~W^Beg(&S6|prvO9US{*49IpyctxV#5r$AE!jm(d5t=p27*)vI7M&Mu>Q|O|c zL9@aN^BF)A7;bvJLNX2cI7q-R#8)?(1)M4ABAFIzc0N`1vjut^orAn zXf>tRn36Z|k~_Ura8n9DAd5y0hzBJ%X5^X)Ca1Si0G$uK?V7H`?%^|K?P}XP7=E$_ z5L59tpd)@LKtz#&U2hH~0u&apna)01-OR)_V>2Y7&l4>|pB?>(I6zmfZD=F+(P2_e z%(ON59-06j5}miJ=m0c$wV*oJ!h#gW^Ey88nn5m;WJF@{6EDcfnay06fXG01_tue>Zt at sTPam(#q>hsE#yheJF%_y zzUW%^V`mNauFk5d5a`e?iu6h=3rN$rHl3Mqi%d8m3CZOFU*W?y at EVb+0eKnQvWJBC z*K^MiS&W*jQt5ZhizF(%3!jKRar9Wa;*XTHV=V6?|%aNWlMYdprx zbsST`1&&GLe1W#vXhRAQ>kYE4#INE9LzvQ6C<+4}n{FDK7D^bYZP}_8Vi;*)iEIwv zjbt#vHsm4AYWKO%N0V;i8bcvW52Py(BhR6;bs*b)$i5P?@egtB{HXZ3wQNRb`xUV_ zmCfjE^>Sx}56}h&usk86=wqW+Z8p6ApWAGng}loE4~c5;;o+p(sctoBI;*bMFmd&| z)r9y7Am>nSR19n=Pm2UGvFnGh)Nb|b*MNFR#|@%2ER+-$#$urO%Nepa9`bBD2>~{w zVt8$_Q<`LeenaRL!u~u^aatlw8cBpngNQIGgf@;yl=hIF=8)31gMN70zCmih-%d>1 zQGwD2G|I#Fri=^$OadZ+)4)eDe#C3}-zl>2h@=f%w;7=#M$ zrmYeha|x~j^rb0zcq?8ecaW}!&Zs}?fLdJVk+KZeR>5aCc zA8%fR>yMwdkw{A4y$LId3|W+jmyRv>e3;UIP-acT&)#aDtOW*MZ{TZ;DF?KrM+=?- z6hUD9T(elqb%+08`0oh+f$;AO|6Z269_HDG^5J^1k|1wOb%;@2RdmSi5k(0eaW{~6 z$S8X(hm~&G(dUcdMzHUth#uQFtw z)MGQ2%4K=WQFAljq^ojx8LVLoAjop5uWsS#Bt7qWhuLKaQCcp at x#|32VM&-MuENkp zO^8y?g+tKoQ1bX*46c|;?jZY#!Nmyx-v-nd4vbl_ at YxlISRNu2U2(R-a!`bf$HRzS 
zkIsV06(LML?hsvOz=-4rU(dMN{7gZ`-6-mf24q|iN*>sW=8i%?R~cw6+&&#pq-H=+bCR-dPO(bt*jpLK*?G+H1*U#@#w=@WLf%=e^krZ(#AB)p!G|I8~{9 zh=UmYsBOX^C%m2S=vn>-ikT?nLxi=z5eb=4{F`e8-XtSXuvk6TUI at _70_Yp~>-wsP z0R54F6uO{4iTAwbf<9h+djRa*Dh#F2 at 1((}562+lG~q!T?A?U!u4;s#v}PTJ_Ixr- zyY0P%u2PGxU3MiseazvqvhHSe4q;Dam!j8$IAl&vZbKWj!jB<%;59x737?u{h4TJ<+Ay z0Dys4_KrHQ>`iQG<_OwMBv1|r32!^l{LRlWwqHB&4jwk at _}f%Zddn<*{9WL9*srK1 zBzp0TqZiLK(f!R~NV|BZY0CT2%b#v)7tz8FDmss!y<56PEL_Q{-_RpyvI9oA{OB&JK zLLSEj7K}#)CwCp at 03kV?Jh4inA)?u5D_Y&NhtZo_NK@)?H;rYt(pQmTMuM+SqDh`u zs?EcBuE14rru9fB51eb zjd}Q$htI at 5W#uyR`odFZAGnQ5HOYx-VrZm>uM&fI8zgfNUd$GVV7632)dQ-W_1NAL z$(?x{Pkk$^+D(p^$W?o33g!TsBo`Jxdf6ea6tVui*+2)Jw`|9ulNSN?tvyel(;pUo zLyH6uiLcjQjwOO8&jQPNIX(n~5WE7yZ1#+{;gQoL*VFkL+cq-IwC62bY2o1-{wmHx zw(5ZrsTM)AfKV)*HD#ut*s|pv_h~y(V@>uDq4J!AzHg=z2BL%ZX*-hHSA%cd2!M=1 zJnP94IFbvCR;>ymBNYS_!kmF+uHvV3AzhyH4?6j$lA9!QX0uks`7)c0{i at C@{j1u& z>zeI|ZsTcd)WQTAI7(D=bwid;)OP_NK#WLoNvKiG?8hsB}Lu0r0Q< zqnMCQ?51KCUj>}1(F&*}OJhx!moIOC9wzC^m*3Eq9O9Ja%bVFPL3STaxv5!GfqI}C zgn>iplppMa{4|tRdcz#lv`-f6BQtx&Xd5_%*6$(z6HBHc>ME^d2cY347#!{eptD%` zj~2?#0&x&+ at fU>`;mdiW^|_xM#G>7Yq1bt#cCe}L)Q*O~VecN>xKGI`p-LH4`6s&z z#|3y-{s!8UC^zq6f55 at B*n+qSbOxZ)9UaDUH`ZLDRo~!GFmG1_-^w4c1CQ{F-3Pch zy9a-Q-?t6Ql$B50KcI?^CpUprS8_39$xHenV^N^Y^%;wDDelR!C|lwNkCg$($#qay zalLJ!gyjFt-kIHr(k8nk671rxWizQ1<<+F~oalF{sk zDCk)%*Ol3B>jA|1wIIs at PiP8+W3QO)R|4|9S$`>=3b5^VTTiv6FXV;C^41(rXq zq1I79)Z<(R)CWZ~rMwLFaNyAL`%o2!c|5^au_ZUS7%0P8D0HH+Z at UFzhs5BCZiSjh z>r(uI5W1Ap_98sco%@g!bBMuxAOnzY3!x9fK}_ZLqrUU=~pFf11#c7p}Q{uNY~=!l|9Ts zx|C^kkUTaFMc*Q~7YfH0EyCeJG+-mmNm|e!TCg1#^fp;6lyS=($ZH!0RGU5hZ`wWl zZ@`#W!)zZv7S*M4PmGlT8sz%JFAuUBV!`VNzdU8c3caVy+*?zoO|o~}CPytjbi(*) z$`&QrJrUc`h0K(EO(d~3FeR7oDPLa4lzvQ@`x-LQ22_p3e$iWwV+k_JQCBggX#Y)% zS^;wLL;c(0AlCB0b8!T?UQ8(p?U-H0B*6H1IEMOS at NjGb_%+%((E(I>2V at En&;Sem zHVWk{xjH9smfWVC22=8;6T)@uR%H?&}cl;1=@e6Pn+B5H|GXu!P|uK)2ml7Q zn*BjRYS5?bfxNKid(~;f_XwJKmFInwnvT)2(aVpSRRqWPXn)-pluDI^eQJmz 
zpwf46n<%+KW%zVK`)dh0bD|;_76P2q)E7|^?9g~^N=D3c{h%S5IYjeFF?gGXAQ|rl zAa7H80w2L5V+IW{Qr39ixZ>0hr2__Xd@!uNJe22P&km5D-F5ctCtbOxACMnIQ9SK> zKSm7X2w&|(+s8now|%ywv*;Q`%iJ?EEqTAg0_~}VH5@{8JxlCBSPfwZY`ly@fJ-ls2*$K~B4+`7MvtyQ=8_+!A{QWhavV1pTec{@?bhKdAGdd5vE=0j@)D at H zy;P*UO^C4Yc*~YQeIIW5w%2jTX2h0POWa8dk)m{Fo2u}N77`Hnk_KXa)DVl1_-I1v zC4Yjvzb5)#_y&j=0huFVi-y$wbSVr>BB^^N3m)wr&j>7xB-%sEMXHPuZ3{%vC5?!k zLv4y`%g2Wp%O|N4q at E3y8Lg89J#f7c+MwZ;z;WYiM&SW2G-CRU(H`WHG>)Zk0d|1p zt2AgPOv~3Bfb at a^BMS&6`WhZ4^ig1Qg&oeh&B7}y0 zEaVZ+CaTi5jGQ?b1x-w{(%hL|%t84=P%z-Z$>heiXbu++9dk{!?R)U-yDc|Ip1?mv z(l>JT?025b=59LipiZpOloUm|sv9-R-a(v_+5EP$ zH#sQV?5z{`v3#W#tKs6YW+;7+eGW886Ftz3ZP;U<0_oCNps*ovn#R8p0xQ!rYL-;V zk}BCQ_Hg*_ni~dgNPb at k>0$4V32B{v%;pkOr`?m}R#%(5r2)H6b_V1UOYya^4R~U) zXQk!52VU}~fOC68<+5 at Q3t}#^)Wr0=^WAC?p~)UQ-h^*&AP>e|dI~zztFMF~pJFEb zxB&cktKnt0Iz-z^nR9rr9c__+R6lljx67QvyWSJ|b~3yZN;KVjb59|VJK^y1br2HL z+x#N4rQZG=jx2(^&K^jH_JT6ae?R16Hg^TEL&_e#xiD#fj at f&<4bU;n;f<34W&#e8 z`iMC+t+bZ&!sNx2mG98zt(fEg3epH?Li?iVg>Xj+t=S{XA4$5DDwc1k(VmBNV)^XL zmSzXgUB{0(>_!Z$};BE7xdL;zjOlzj?cYOhXdz&%@5lLsg_41BY}8hHEkLs4HP zdReaYoy5O-7jJCo7B(hTXoe}*%9)Z(NK0Q$FYYGf9YK657>+nc$Tr6tG5J+QP0zsR zS`VgllsDBqaYR#*IV_spiu{Gv<4?B!AIRk>*8r&I4^Nk>=bpmP^$w?%W`TU-d8+Na7v4tFMIF^x$H|5C-|^Tk zuora8z&A|dOGuXt<+CDVoj>&aaeC0wsEJhSm=Zi1obb=I6?%IHeP-Axkr6iD8Q$ z%FRRF@{Gnjy&+HcmOM1WUC4 at gMmvWEh=Ma3Tr~m1B- at _pW2jE5GUPE*rk#Q1UbYOr zA(6VC`P)mL$ozYCJoON$qKi;^Gr5lkoi1idM&c!x;)fnUli;!WQM`NojGx)3*bu4o zw0jk&Mk?JpnRlGhDz&rAdBJ>ZOKVrq zF)n(itoV6_jJwQi_8-UWDv{$aVgdc=A*W;z_HvsUN);;s(JRSy#0LB9*5FV>Rb?=* zE%!W at b*xZ0VS=HWVKJ6_3iBhOqp_s$2770KPtz$U^?X$5coj$n?PUYMD4q>Jdqkl2 zW|5wYI=T(@ry0=Bff)Q(4B`V6d5xsdw|U&+DvS%TGKMpOYFJ z%e+&gRa`D#7OAs+jxS)2ZpX#R1*WkvUH{{RoJ%I8sbxX}AA>4EU``H2nIWs*!v&?G ze1EqG&O95D#PZs?K#@9dV)lTE>5&!_WcGzZ5TkD21fN?oD%hoPkjl7_r8N=G-vqT_8DSu_h{t&yZ~Ph zIOsaci6)=qh7(IJ#w{{?^5^WEP at e@v*`9oe8XfFC1m+;wV}t{)tiNn`NNSRjT10Ix z%-&tsqE{F9ni6upLT^It|qP8?It)D3*&lje8>=J*vkxzKDC{8)OPx 
zHSkS(a_l(vHUzVl%YBUtFXPEq6D&xBiX44q0Ov*Dy)*A3-_S)!I6zJmpFH5)AMJQJ z`Kyv1k2Q^J)Toqlt|K`g`vqdUH6uG<4;Jp#x_ZlyGU(yhBwunA*@MJG!9TYotK8-~ z8X^yx=91$vmw5|ngL_3Pl^6IC-Sxu!bWv>dS{W{vd2n6m+TISNbu4Suc?f;CI7BL$ z*M1MoAMZ62(rNhW0SDNodd+}JjenJ<+)E3A|AQ^=XS%xV1nYRYmIr)s3r%So`Mbb4 z0*%A+zp(sIQKA?;X{3xAo+k9-&oVR&+ at 9U&|7sZc-V%qq?Pxc{z_Hm5 at jn>`rq-Zo zlf>ZdBgm0P44?_tQj5ApVXa4#1a*WpoDyVVE2Y*(pHCT9c_EE*^0xqyvorED+l|7W*N$z%A z9t4>~Fs->0rtyL_W~e4ljzocx--?Yerkp`kv0v0T-A3K2{C;ef7TOiZ^7}0pJUQQ3 zX4i4c?~(Q%?8@^v!MUfb6F$bkF?dAyyUJ2s%c_s%b+;T+>eY91!^}Q>wO(DlxR>RS zCqUV(0s=X2x}Kv)wmm<0zXgJ%J`HdYlEE-jgM8wcy$;dmk|q@{1LOS&KkJ5 zj~sRO)(VRrI`z0fdU6J@=c{NgoQgm_O?ucOi^DWytD$nFwy(3;uC7d& zr%jR0EW(WkuA-$geWF>AJYN+)-sm!6*E0qJxL{#Km4lEQ{=yLW!Y!y=spDNo{B;KY z2wvq-Ynjb{s7Y5k9`fQO at Mb#SlY9e#k=MiDFj+0v_oohO0WMOHqk4a?&1DnO zP4yX-cU$iInLCu$lFRmF+S|RT3Z>s2?rS8flB85ZN>AuY6VZrN<7mD1NWKj%`5psP zofD{vT`fSp{tW@@y8jN;LqE8HYMtPM=>{iEJGjAACjk{O5>nHTgR9O7*k4)zn+~jp1hT`PdKhAD;D at s_a1>HpNHkT<`-YxoO*5sH8^`B at LBZ+;*W6b-r*4ZQ?5 zx3sbd7EwJeOhX`w at i-OqtQS(~8vY<&mj6W};(=J7`2;|)TX+LGF16|$jIN5%Ii;r= zP1S$U&Ixo20kgOtb?5s^T2jRQ;q}&WBJf7;@>_EGx>YK~{((s$`uu2PCs*j!r z9xunUROIk!Z7HhNR6ERmM9yKGjtBkfrXnRiB7Tni5R3c>yliUnP-R*WVVPDuL0F~_ zpvg~ImIry{o{pZ~RcF<9<&H)t$+<&pg;6$Ind>!LCtv(2J~WFCv0sLEN07w-bS7Sn ziQi%?0zx6NFC=DO$wQ$!&2V1BA2#xP$z40B71oyXx?S-gd{;!YQKG^&y^Aj_N~4^l zzNv|DlG7(y4bQqb$w((Bslew!$U<((BrK%Cni?8ORQd12v-29yP>GRjn(L6P5;_ at 4 zy=4zDk_P?~N&a^{2&b)-(^leZcV_s{g%UFi{3_~{TUz5I^&x7mOR1=f8LlDB at O?};)!8yv2Qnp2R}2mz z9pL)#(>#Hn)nZ5YxW_98;{9r!HLivI9XZEA>?F2?446Z&P#Fg!LmW08)iIi)TM2er z$<%Tae7gyO41a=DGCHuRjlGQmU7KOme!l0>-0 zyK$L0xZn$j8o}Llb&uG$Y)@Y;o^5h7AFr(TGnrFaZKtiSEn58IvC3)-+A6Enp6BC( zVC8o)K!q*pYVS0PqJ1;j3d7TJhzlYyjSkPx8=Y5(JzdQaZhlU$ z5XX at 7-udJT@nL}%VXib7(GcNYN^r5_al9IQ0_~#7M4GA)l2g^Dv8|A`%Z{7TMci5g zzm|(4(KQZZ>mDwwhw$)6!v_N|K^`s>`t<+}OqU}VDsA^2t;0WAPs1;@3BU9M4L>OH z*_J|HH~nxRy^ePyCJ1$u?$hysd%MvKx~R3|u7mRy zgOLu9l!iXeo}P4*`}Zgj&&npsVQ^v&QzNN(!zv!aJb(cENf>aL-oD|+qM_;V20T7~ ztxAR$eqZg6wp#I0y>+ at 0xp~xw`z_W at 
O|1?ShHi2l(hb;zcljQ(m!h;1hi5z9(S%B0 zRFIS;eM*fye$4llF6 at Z(13eK;M_j&Tg~ zbXE|%DgAKO!^8msNDNj$8|~8yXA>>4zd)jAesbQOLS6I_P<6{eUyMYm&`OuY^5IK8 zQ8b4M&$}uT781qG4Px+6r~+lM)C;4I?0~*eZGb1Lp|7^zj z8le^4`j~e!8n<^dIU6Vlbgt%+t3jo;1$pYR-%o%+TG;B{%*F}rzng*GQ`mFE;r?z0 z&jnc5yBQdTuENFdTmt{5??}b;U|{%7qFJ}6aDj#j+gcKS{!h1)fmUy32;2>*z@&F? zXqJ&`dC^N+UY_Fv4vwqP$fGAPn`gS$-^-Gqo1fECC?;j@lHHF4JwcRg`85O*VSHxqX& zakmq9CvkTXx01Nk#N9*OeZ<{Q+ylg|BkoVcttakL;vOUJapE=*_Y`r at 5ce!`&lC3| zaW5113ULM6ub$|>P25+ATR_}-#NFA0^ozJ_i2EjSONhIexbulSleh-rK119{;`S$Q z7vg#ox4ApIzZ3TiaepFiC2=KZ>gh at D^YW3YzDm&N`q94^S=8uYZWAgH=wspxYZr{$!LWq+TMr+2z*h za$)@iWS7Xf+X%qh6^fEdZs3^yW*?onTdcABvSkXrhzc+<#(V{tPzciTRdyfY!BmIn zlr21>6O73HTIkJ4s-#>S)oAFK6T(_2dk815Iz;J6v6$#F^ji8YYE>e73xxb+@}Q=V zrMas)*w7Caw3`8ibssGvV0Y4(wLZrB0kDhGvX62tW; zwiP3K!$Mk*N@%RT`}HPN-99VE>lp1x6es#4;r{GJ2f5S)xktd~IIAHtK*QUp4TU#< z7bOObDOnF8`;dyk(@B)}R+Um~sw?GGk=I`cVry!MxP2D#Ytdao&BSPM7&O74KF`g8 zp1;=%Ik{huRDPLc=k$+Qjo9`+beb(56Z#bQBxFhJg|E;_mN=TF>mylS!7SLN!3N3H z)nTFk;}Xvxi6MoCG&t0Uh+2dmFiflXk%~aMOcOIwHAqg))iL94l#jd?0DUX7|#dXYGfgzyUtQLSjKe{f|6kyr~&pX z6p}0?NxC4l^(y2NciUG}TL at W~oo_VT+6{3#YF-7b03nH)S9cJ4Yc}Jr!tS6B9nd176P+~ zo^QZb4)j28p<*p1tLCk35ztOvO}5PVrs}(p-h5gNo*PV_IGDSN!C}GVs#7kc7b4ao z*JB^FHptv>t(@!KxZ8sRI(e=00*nx_*7bzvD4{LT5Gmf*$Zta^yZyyVtitcxP{EQ= zb&<@!Jt|<&+eCfdHVOvqciw0a9$ujlvF>^B?Sb(0jCM+cm`I`bV`6lI;xRF?f-R~2 zm;&K&doh+Dkjj*C{^TrJxO$=8+;ZTgd8H6GS$wo}wBPG4w9~z9sBA at etGzO5RKeq; zX15zf7{GClj~di&6k&%09-r&x`Ie|Q<4v+2Ise$KoBvDJcORd1>3_-kLc3W>RP5uk zlA=|Q&+7GGYW?lpZQllxy>Kr zr?i*VvRoxI57MPf1J3t=j;}^O6E at GPXx9@CJ?~jbU#KYfY4|$6 at Y3Vf)A!TbYaGGM z$IiBwDaEM5XCr*@d!q?PKOpR!)}|A!`ay~+Z?`C1=~3;~kIQ*qd#DDn#y9QaAW}{L z_NsyH(C;zfw|;FGymMuo5 at L_F7c2BHckit`s)(vx84ZZGl6kv|uOg&sJ7{u6+d?B< zQK?c?H!++UbL3 z`3M;oZCM at -Trtee$X0VRY*hPmGZN2?~{vXJ7RmKIXp z)c?AWVo$duk^TSh at 40XV@s*MvBKbv%&@yACS^* zm;7~-|1ZgZQ}SPy{2a+|XjA^=K~j4pKU4CTNq(8+zc2agB!8FWAC>%zl7CC`y$4gj zhvW~H{Be?>DESK{zew`mmHZ8oKYAGTpOpMqsr(MoZ$~8cf0z7IlD|jtH%R`6l3yYD 
zOC@)IS0wB)NK-&^wUJVpJ>l3yqJ+a>=i$zLt`Ws;vG`8vrTE%`&Ge)O08E|Tx* zgyTT!pOO4}$?qZcdzbWkz2vWv{4&X3BKet;|GeZ+ko-u=?t{|4GT$NdAkGzfAI9mHhW5e}m*7ko+@}e@*fqNPbWR_4`Qv zP|1&#{Hc$T=IuVejmy2Ao&jlx9(S+)X(#hzr9VrANRNR zGZ|63(jb=;zu-OUul|twZU44D>p{ZHCQ1J1vUbz8_g5Xh(uh9pZJF at THG9aoiykCy ze6A^(Gifq%O(|S{EQ{-=E)#=GGqyH6B4JhQ?;|=3%U8( zS&}zCJJ&QTEOlsD>MUtOU}wlYnUScAo2-j7YR87*IL9Suu~6Cb$(d#o{Um?$%o&+n zMpm9{eia(Bv(o2G$jx5#e0D}^JeTVXhfH%OWalnQ=ICz#|D5>b+&ojFIR|HkGu at nV zP-H5phx{HtL7GZ)CY$o%cig-MFz$0;+OZs$o51D5T#&VdC$tUMFUo-#HHUh}>{K(U zY^pgc3$Y6z0vLvu!cW*D_%6d&rO>s+Kx4vy)R*bZAr%Eix^F716Jyf82bNDks+zk>S4HrJm5~66iDIayUh6PoI)wWZThst$1OJH z=H~*YqlsEzBH;zjgaeoiTOMu|vZ7Qo$*<2^44_O^rKF{&CUeQEMak(IDXJ{0*+wBY=Ci=*`$Jz7>`IH6GKe}85|i_+GJX`bNb2>veUTyWLVYh ztn<9Ki=50SGT6@}S^^(hH8?HX3{*fymeliH0H$n99Niw zNnvdyXQi8DIV4e7Dj*-S6|`*!;v{Ka;vh&x0l%2-oS&^qceNP-<81fPmi{en>0hqt zB{OUtfF|?jXTVOEpPWa=`LRW``jg6oaxz+qB5Q at ik(PGpp)fd^fQ{OKAX{b5GmR$Y zOv{p at BxKPaMrSH4-o>V{Je*ejRS{+t28QKDIOlF=b~;`Pm*vls$=W2BMUIG6O*N&f zG~-kRb&giOKr+t`%NyHzJ>{8ToJ at tP#RS7 at GV*dVlM7v##MwWDKQ4#J;jVHgnF>j% zv)f&tluUzmMdTE^Mo%V-iHU(#2FXZniWlfSHRq<7T0#&HwxUVkGZ$za=4WJ?FoSamlkhxgVMz7rCTVBP89#06RPE%%InxrfQ{BT2 zapPIT93wkkr=P5qwcqbF=n4=?kL!4R%ZB7a<5G^HeiVJ+{GC9;RP)RnM<-qztdZ}; z8d!fW>|9crw8+uWMZia}y|s`MNwft~fUg1fOZ#Tae3;dS5RK0?CFhx7aY41P&H%yZ zN;^`{BR#;~H^Q~T>7PDtTAnGF&0- zzyvg;lWjq0!mb4^NOmITj5GvE9*1kJzbZW&3Y==nftf5#&{ROS80DO=qupZj$-Emn z4=I_Jc?LO5E$iTsaJt-pj&4m(IQ!e04uF=b+S{=yO at Mz9xn%XX;1OxaO{x9px=1Da zWS%NH3p=lJ@?9AwL|d3O^?yz0=Sl-J!9703)!uogR)xC#{lBr^Trg{A#X;NLr=x2F z$M9C at P>N{@mk?4*`bWtSw{R!Z!HKY at OK8*vGX%SGj6Ec(2 zCHfbLbxNMYy1*QW2k4XnNB~Wi?IOSpK!Fl0LG~ecy1c!7DXs)9c)^^tBqOH{b%0qU zyh6iW1vv|F(#wybm1R>+$*F~I$lp0Gttqy7KsHbUqE#6HSHKr4eOj9tusFk%kBcg0 zKGH<61Pd`KnE-8;zlj5!nV$C~P at h=eIG8X_YOx*4Fu#Q+90vqF5FNR5=qj6oG-Ymf zrV|~>va at ptWU{ie$bw2XbJ^$vMU#R7?c^o>mYO%miN&%c0%DPgOGZN62_HbxD7um8 zFW2vEX^uF5OB0Fmlc}&9rDYQt6KM;V!DY6hvk}=N{{ujx)ITDoWFY-p at DHK8UCD6P z1Lv8US?HoG<`qf=eV&XGQqFnSY8 at r#0v1~A;!f$`yOBQmPbPP`wtb54d)@ae at 
_GHv zf}D|`rPw}sC-tM at Wv4rQy7f$9AKNeI%T6Bsd$?h690%k!%7jE+V?B3czwv*;PEpv#vb|i*O4a&j+ULBQat6v{;tXOKW at 6SvEP6_ z-#`D}JKGO4pZ&1)qpco;m1SOYHk=ID7i#nW?r+PopXz9spY-qE$yH+`(l)PR&n)(S z>9l;O>gehrLBCutFa7SNzMD3Ce|GuDkE4!l75-K0Q!wJLjCpOX at 4F$ZJ6zZixL`%Z z*hR{}XQsX(pOtdu&Xk1<=Ot(MYKSzIPL|EP_2)a at nM*&D_pG|3e0lmF=C6Gx*PG&P zTRQ!|br-*^Y}pjk2n>4d)d#gnheM8Cnaw%{xoINXim2ClZ%l%jmaTto{{F}J11 at e>ynpmNE at t8{zki#5^dG?kPkUOrp2_RvpK_^0 zo%;4O3l;^gow;C%|KSdo{~`LmHE5m8 at 4JtDmTTT(ZXEqY`M3GEYn>GJ^S3YyLYeedTso!E!}>3ZT(!~ljFyW z-a7KnJ=G1-Q*WJ%yP3Y at Nc!}hpa1H;>Dfu6Ht0@;2R0ab`hDB at oMP16C7$f=>vPXd zJD0xp`fFLk3U_5pc=h|_gFP&1JJ%e|4jXlG{?vE!1L_nL{da8Z>e2tHfnL*gskiSP z?Ek}tH)n1P_gwJ(tS`h9)x!?gZ{j~X`DE{PXJ1%fo%+GRTPc5h_{jpZ-&>13zV!C} z at Yzuv?4{{Gxt*uWyk36p$9K9c*?MtZ_;*XAdu|T+;I*S~2JZUhw=LJtEbDjfwEwZA zKff~W;<1~}-~T#4%W|skp{~1DzA$iG4bGVzuf!LjzHu1;wlLLk>N&9~5bIE%=dd{Eyc6j!Xr+4{p{O4-{yL+DVnrXf65kdC! zVLiLQJa5$S-3bxlo_jM>wRiHDzw*V*=WE_&FE=jR`+_h#Nst$b*K#Dx>u*$qb8i& z;4^#QONlvuMw%AhX}UdR$L8u`|1!VPt9-;7pH=sFe%x)lwWLGg*)PMQuWuM57T(U= zr4tq`@41&9GvsWNS6tx at P0>8t-y-6w_w`CVEw2cEJ9bA0t3?>_dG*QY9|QNUi8kEX z(eL#y-u3(GU2A7wS=}t%uoGH--o2~?2^aDcXYbe^oVn(c^`SwESH`nT)-56o%^l|0}-&g(}s;m7y zIHCTD2}Sqk=bJ7z7C!sb7G3BluBUJBkwZFp-rAWk>3gegYR*|x;jrsPa~-#@T>Y{7 zp<~$_^}mc=bFl8-s0zytpEuqwNZk2djOmNDlU7^QeqXNcUwdzpZ`r|hL2-0O< z_q_c at _4QxgJ=tj6>9cOFwbzz~XNP?_POAC9iOeSGOd;qcCdUlu={ zx#7KMd_Uf>BD&qC1zc{A8&3Q21m<_t^7n at UYSj9R`nE9ha{7 zGO?)gUg5Nr2j?03?u}mg;++Aj|M5kK at 6Ws&{8dj`;%_sD#XWd$-n^|YREd^%`I#SiCq9=!EWYrTEm*@u+_u3tI%!-bt) z9 at Kvk_^hR3V9|y*p4}E&pVJ(CFhf1z%D5%@4`0}{&~)a`%%Z(d`zBml9Ib2mZk_Me z8y|Q2XH?y at 6}b;X-@^6(^nz-6*0B=Bj9n8v&aCzIu~#4T-muK-S=u$uYwMKG9bQ)- zVUCynJ at CTyX#uHsd-x{2)u8l?_{eYU#9971pA76ZvG#KBKKcLZ`$X@#eZKa8s^P?wowf(Jx8x-5BZYggmtCEdc{-~ZRpZ~pc4QRwjnE1MR+ at JIdvkEz4vkBq*ZpDnCAc{>{X6kEm(S>@-jJJ-+4nS1X$n zc6>E@>cuY0;(saHHARTIIP+YB>V;SSOq=oD-#$$m at rHf2;{Fq}W@@uvT=dj8bBcS8 zys+h^rt{Yhe|PcZXL*-qeLv>P!jq1x!_&9^<`uZ)a`bZpuQRI4*Pj3AUwzJAfbSCcY}y@ zcZ*1ONw<^&DhLB8Axev20fK^x`dc#t#+-AW at 
424$dgG6G7B_o;R@^J^wPV)a11H`t z;2!++H8pnU$xp;*V?*z0-E(bI6p?$+&o4>LbbNA|jn1i`8)NzO@|DWD7gJ<*lk1jc z6HQa=PXp_Cp5YamB87%?M{S)~#tj+S#wdRn^>L(RJ at 7yMykAL*eqcyPxA&WP>iztn z at t)LGSa<1NwZWXec+gXi9&W-&AGs6C4#_x#Jz*U09^q-l9hOSFo9MOYkiaX=oy5So zmYgL~oK~T2p8oO8$<)4p`IOy(j+~Jy|Lins(X2e5uXos-8Z)Tzy>2PD3*5djx|PX) zHZqp*no5k(0(qQG^ho@>I{{HMJJQiXN_dftto;!Y=9$4=6na4`HOB)+pT7vy9!d@| zE7lCXKu;G=?)oe&7r9uL-o{)yjAvQ?#j&u$&--n)3KiE~s!FG-Q~gzy^nsftd~bz{ zJuiC{!X=xF9&qjEZpmE8EC23Sa3{SpAL;g>@ihZMLuZLpQ{rD?U z)n2xUuCYEj*?vutrj6jUR*OJZVrxUxY)9}s)4k_LSGwK{XP{m#2w7Nlv_wB7F8^@1 zkxDn+G+uCH_S}nWcBfiJC-1CtHuOayh;EqA-*dtV$C+&Dnoxc at W&2UcYi?eL(o8Q$ zmqFtzf6?lZ9LldQyOq)pyRXkxl?+(P5V?A=I}JZ^zFCz at Ll5Km)?6r=jie9fSRG-M zj(5wiWbUtdq~J>=aaS<+>ccqtMk_HABotm}BdWnicI{pI;)Z@`c>1f0qv2r|4f7XX zUO~i|y7DSLSG1klk z-kh{LpXfcty?w$l`Opr#m}Np{aD*gZ;(NVhE^TAk1_8odvbj>V<833G#qZ{lenhtL z%3IOh*2Z<_c-OAD;WEpo`iMW^%gp81a%Ne%l6GGPg2(BsMS5HhNBIn%GhD9LSTKRwOS9zGwJ5}mU8r at LF8Ck4}B*-_q|+!@EF)0tHsZF}=pD|UC|d><>agxfM1 zrS15fKaEIF%Q5`8$@&VP!A{NCfAUk at sOjssO|#c7#S!4mWQl6SUQkz1m+^* zbH3}d`#7lem-Man9aqF{0>l82ZBgWEk5=a;BI8(jUazowKSae0%4 zAjn?UMB-`raCHAj**Skv_{|AJ_WO?Yk)D1NA09t&nmDFUQk%E1eEi5(piimFq|G38 z9fengL0)g``F?+#fFEU9uWr$>k&5JsIBGJ*mC#y-^}0NlI5X>XMyYi?Wg`%eICZ6^ z_PP>z!ExT&qlyw^GCcZ at -|l~&Su0Mo(mYXCGvqr==AL5GYPm^(KZ~+|FGBvb4-_V6 zZV_v?5js3zaJJo!Uo$H at n{A$2Krs^`=<}kbaQ at la@$|9j2;wGHZvR at pd#_t~2ot)F zB_3<0G`Fo`{ADR2nf6B$l^tKC1gGqmeN4A z9cfFw)pD=sFeh$H_BH$nqJFaJx?0jTHa;i6a6M~PSdYoEiY7~*i=Hpw2{@+`}7qv z0~|X$&21)o6Vp%&{;7|qkh(oV>ieTOMc0FfwtGGuexPenGu=^QwwWfdX++_(iPi^t<8bm-WtauU^RFUkELY2dqvbKt_uJ4q=iACWPK=~_3QVs;9x zZ?$|%JZ!R at V>J7Pf@}IwMh!CKTPDi}MC^U>Odp!dg)sjgvwh1DQ~y at -pW9l`4#Pmg*=j&sjbKKH$1^-_80 zOY59dT<7{1y5>$9mVb&u~0n<2mE?`=wTMmjxW-*5z$FeFjbHV>&E at 8BaNl?6B} zZ9x{ULo!JgF+tf|3K2Qv*kK;Nw36+v`m<>z5rXG;RH$1ogH zgLsSVpSz~jHYRpI$7$_H@C5}TM7hP#`Y7&cC+?I6(#~`D$>Q>^;Ljc9!Cgna{TX67DgYN0 zF%9D8(YkML--ma at Ygnw8Rf+j-n%9iGHnJ$dPL zUWSH0?d_SK!X at XPT%pKL4PncV$)Q;)sgq~!Eb}tiTwN$$-9o;;KO4;Uysyr}(6G8( 
zRzByglid)fr}KB4^8$9&%%`uclM)i2p<)tsn3EUT>Ng~;d+)66vTbL<-+p>)Ctskq zDUvDCBY`kim at cQ|8%0QL+o`fyAMv_%QMWChpSG)to$$V+H}qyti*NWK at YhWJWqT1k z;WOGy{V6n2II$G%QpK#R!Cj)$y`s5&9dm=8UsURuy+ez$3A@>Om;P$G_dnoIVPpuIIE8c$)Tm7 z(mPDU%D^V-{QM#-{IZD=WH##HZAeqUJb>)dloXIg3IiLoBB|z8hDuE!=I8 zM1IJj3g!tTuCsrn$X0>WqIvVpU!H5x!poXZ(&w`Xn_}}Z;-l_(RHx6XXmxzNq!?T8 zV=?P&?7uGXdS&a!kMF%*&yb0JLxZ^&gQG;gW+oE0*LG-omzG$Z7Yoi%Ihj#LLfrWE z5oPsY#IRGH>9lxIOmHJyTxrfOL^bg*cuSf+DD~uj6P) zhzmT5(fUgJRT;0|jggrr8T(U;qO zv=7#|b=P}KQa(?oJ*y_{SZijAEzJ^`wF^DHe)_SU&--y_#mZg7qfQI*r+F=BMHX)M z5$1x+e>1f$i<7u at Td$6m^%nAnBwBIhA1EP=H4A0Vf4Ts1+hZ+F!fu&}E;m-KwU8 zfq|lZCo)*Kjp`{%?v&G{y?CV!9o<6+T$waAy*2EkHKnwGpwsy3uXBBpC)IR_=_yZa z6t!?#`=t}@tBC};(?}!#t#=9*y;b&#iH;sxxqKI>L{?de3CoDsv~6LM7TjqiKFjeP zibaKqM=kF~o#yyB*fIYW8JpMreb#h(Wu5g%$HG1G6679xRIsFEVx7M;lC9NdkY-nU zMPBUccQ4-;7Iuw&{#Uj?Xc3>=Q55xTA+D~?qsrNck{l9BX8Vr*It{1S_S~V6r;fPt zo%)2tr7s8ud*9;4`qkkhXhSKeHCqmkip7(shfa`PUSA+){9;Ztm8?aQ%|k+Jpm33i zy}dWY3~M3?zY(zTzkgyoDw81JT6SzXX-ug^L~1m$#*Wl$~iN(+ePp7j at xnl zYS$M+x4e>1F?nhtxZUYCT|J)Nmh+E%;eAu332}p*h}Lh!l*K2&z0h0wc(N~E?&ItI zkH6b@@YXo^%Z59New}svny_Zq7-nzl_3o;*z$ILpEzz^4cW)AzInYR&a93Cwuk~nJ z6i+ at iH@~-JdGb)3)x1rt;^ZK+5=|bRiq;8rXqgswTzcV@((gI z6bLr%%1NnzmJMR-fj<$Hl{&2FCatt`S|&Q?nD(Z%#$_Q~D_xHc37zH%Vy)fom`fMR z+co at -ENXW8KfAJce)TG|)||fO`!K!2 at LI#SiNXe4-wqi$$z8ct#pkea_GrNTj!W}m z?Z}5Ex1#A4 at p^;TKc?+owO9!*`;z0Yvnz{!SY|%_Epd({qr(G*;hu&GvB6skABI1AiZl)?Xd-|DSqq?zPS-{k$qEM z7q(3dPx&;+U9c0I`t>v7=aXq~L*uEobGpy>A}MAhmd?+*eCn91&xwBdiDm4?Ih9wF zc4Si%WtQtt*Qc7E at zez(O$+fxbBBe-SDbCf*cc7_jDAo)$V%bpe}3A3fL=(GtxcIIE>rMlXy2xD#2mT zD~Vg0H+hYdA+1;xh=ii5^uPo(D6(4TW`O+TvRGmPL))ZRsE+* zHUsI4h2HWNdR+D_YLJQh7)hZn}t%r&!ChC3o_A&lzem}BONW(3 zIq}12s>Zt?;!V?a&&_TKp0d04;?AUKYhMHB`VAsPl+(R=^GTd=9Oa3wmXEemFX!jH zg!IfPbu<`sb5 at J^zka36IU-%T>oRw}`=Ql9NtK5yk<61}C-&5;o6bBidK$^X=5OKj z$ZW=uRgQeOcJmQL%5U1puWy-KidR|LX at 2ALjHM^`gxOeju({mnNImv%|FQXHq3M9QL~D&*mW2V_BQIbJFPhI8NWpZTqgkfD32#quh>9Dpg$FxqU?MB5SFn zp1`q$rpUaV at 
gtriwv~@d4|m=qTp+k_9AcE~Jhs9_*SuZUqOVFbCqCNyI at HELVE+0b zAH{tyMY}=r!})Ht)&nMZ16+_H8<=cO1^RE*pyNk5u_)XTAjRo5{VtY$utUH6A?hrANe?&L`@y>$2gw zL4yU2YIW_OQlD2dPE|)oxY3?lr^O4B+U=0wxWlq`g6-C2Z+P_loV$aCzc~f@`mx6j at h0jM5o_t(bi at o;w+suis`Y-)!q;h#N3>y at td8#&q;mdM!LHqUD|>ZzFUHV?N_c_Rt4xG<7#5{eaY%ASu01%Ad1*e;^QysTYNa*( z at 0*t;GVFs0Se{Cl$o5AMm;0Y98=1HX7j?YPZs_M3S^wbihY9^-6HW_xwIo|djxVc} z`UFxB+Duk>QPj2e<{9Al_n*%y`w_rKbL$nj{3nfL(sT^7q%Dzn?$Ue4Y4%LYc&idI zUf at P;%SvhixzhEcwY9*tZ124!9C*QSBWDL>}-vke(Rq8v!{EI1A)&2KN1dNC18T@;?gp9f9xy?f_c~ zAaOv{0q^biza|hEsDJQ&|GNMY0O7s+{ci$<1#mvt;s6N*0tdXf-~akR7yu{j_rDL2 z^MJ?p``-o#7vMUuMU^)ah!WtB`~80z2n|F at go}gee-9vMLHOhS{s&{0=oH{`utl|3 zC=fZoZ}$7&5XdpWnfv{J1IR_d&-eS^0SFJ^Hn2tM1DDlAYJk7)_dhBPHK_mn{r-0Y z0{Vftf4~1xZ3_H}i at _EZKNtw;XX2Ip{=W(Yv@>zqe*a$w0@{IiV!!|GP!C80ZUS4B zzGxsSfIsi|zb+8EKlB-|A|=kKZ<4lwb0-8zxfaL{|L+e z%b>sQe+|q2Z(-U0G?x9hLVw%;mw&MTJ}mn$g8sJuS6KF+ie>-Lu%l^l)?7trR+y1xy z!Ttxa?7sr~+y38T+5c at U`=7zG|90qa`~UV2_CJ7S|0U4h_Wv5o{?oDSe-g|7o1wq$ zf9D_Ue+0|^??Qju|2mfa=V00Y9G3leL4Vu-Plys92cpBr!J)>7K}Yd%C}H^c5Cc9A z9t}PmI);xUMTvh1qQ}R5VDz at lfdx9l;ksrRS%@Cr70xqs8yRq=#S-6>11Vgb*=A0FglW5GjNQ zkwJ$batIfqfN-G0=<^vfCs))`aOQ(E8EXsF3D9qU^y66jz7T{uO>#gjaNUoNUP``y zisa_wVCrP=w!g0Q at AD@&)Y{koc>Z({`a2wTLdDF(!|Y(==7w6&YvE|l<7CNWYVU+8 z$3Z+(n_ne&MX&7NKmYnm4|XQtV{KuAdM?(?)eMz|gNeJD^OM4Tmzsh$Ij#^ObYT<@{=*hvt-P&n? 
zWi0c-1KsEcwb9Gk(F^aH1U59(d3xcg;5jYo2jk^y3Y1RywM0tG at n5Drct4h}BPAsjp$0vsY7G8{@AdK_jP zejF(r<3IC1$nzjCR35az(E&#f90PESz#RpS3AkgxNdbrI9|va>cN{ojQ3VhYcd2K)xYQXALI12v9P<`6;U5MQb~GxIwZ zbo`h5@$)%OqInj>ZSmxCr>c;!j8Y;<x-8!$e3sZ4V}8&^&rM#63lLfwc7+4Ov!&%}4(WQq|xv_s^y at -&5N%SM6 at OY&hI zgxciqk at Fko*D@%d;;MI^d`|eWv2S5?_$vwL>L at HRyiss-P1mrOPxv(ZbtEjN4A>J- z*Q&3 at m;%Qi>>Oo%gSsE&cN>-8M++Fsiu>kFW#f=6N-U7 zRAg%%g=IVg at teY*=g#kS(7i*#Vktp+4s$RB{Gf7x`Ha9)!RdeZ)oN18n;XgkV at O!5 zGzdRLwZLAjc9pON*e?x8Kh5C!kZkp)LN5rvfCNlL<#V!J;*gr?nKTMZVgx*Q^{E+FdUGt^ z6zM(`mWPA|DTDm4$^^rUb*x-wk+9&S7`DKQpFDHcW|d_~wX-#6X~37u_mZgS?)cs) zthg1EAD&;wTO)xnSQ8(Hr8?784H7|p$ALcO)pJs>5^I)%M_?t?nEFHEcpfL#!T5*( z at n6j!BDfJY7)!7%Z$0 zWp8h8O*Jl`iT{Ih2Zlz1@<(9_L69*Kesa*IHC^YU_eNon45<1%6p8u#G5P4RaaaKb zV4sT7Vp}uLi>YI<#tD?Y&?EUbPc^SAFj5w6;u^@pijxQ}wCm2(u_FAr-pYKc&WC`TnT!O60;49V^J? zwWpG*n!qi1N2f*@D?C7(<73cHqV&C#O_5~H3-5T(73gFQkHK;%KzsLxeO_j$tgenE zl51^l5A*F at e18@zeFBq>5}iLIWf>F79-ovgkUCQ|-5G`D4Wq`7A at wzjmY=PG!-$rw zOuG^X>eo>my7EWiW3b>%Onuw8H251cMquqhV7%NLCauTf1}vtGUBHP}8-s-!qTAOB zYFtnfkspVp?*M&r$a=%%%!uRZR4T1xY;%tmUajUI-uz*7mOwo@@HX4z&QJuq;(*qz z3L&-1LIJu@$IeAS1vb+^Ujusi8AyyyIt(SFf1{ougLJ+ad24ZX8;Yd;4PYLqbU2WRVwo^Mwla z3MYxwUe#BxW*e3AK(9ka8!M~x_!HriB~X=VIwHptv>)<_8?i{7{26{{{RM zmQ`KdbJa>Zun?&%N&hVDjN%Bc6$DWyMh_X?Q| zX%>aE93HPtP;M{d80_v7&6NB0L*<4rfmBgvC0UJ*OOi}xCZ}wtu2kVsJzvks*I(qN zw3-Jc=v6z_&7Y_?PgHVGlR2gHj><*+tQie!nSI$DbMiZ^dPzATz-!suv?>g>}`}x*ySi9 at +TBJ7?b%1wEPWQ%K`lFywPlcT|6R9_NScN2RcYi+3Nd{A^*+ zg%$En98)Uyd#w_^RAWUhT>jwjM>EB{>%0CRSK3Ym8CIC~e7~<&_L7kNn37 at o>vPhr zmAVQtiJCPr_JX53ci9x*FTW_23tZguu^A#cD7SUQ at SnGnDFuYkvoQ-hq>3fUEm3E zM8&Wc#AmUn7tuteq6l$nL)=goMc!{qSBO6{ zn9o(8)adeTN`=8;T(L7<`XnZS-+21O zHf%NQ+uJ5W8*zB%4x+bW!>_OL>k&cS^bF~ClcciQa at n63=pnXHlW$7-Q_AO08*>*J zS6qH2Mnk8TgIDz9)*j6}0kYAknbT>~r8~>cRDzFOjm|-*RZq?;wT#M=@2p>Zfn03s znWGez>3r#%PkPe#4rRrqZ1UYXrOFl={!iB4mtDt92}$~DR0 at J@Zai_l9Wc<7GQyN%ZU8g(~5EvOfZb5Xn=Ek7ZJpYXtk)k8Hz z#eC)Ku8UntRd#47Z%!c#a8pbc+_{6Og&FJs>HDhAgW!hx6!w45qP-Y(#X=H$x 
zu&8!UlFC9Z(fs^_BqRCjw^hwAww&Nc+?M<7%zDkI8}Gug8|||+ at YE4LuIF6L&sjDK zFL_)qQ_Qhb3D2kOh}Ty1)*0;QmJhUvSkkC$3E~woEZNg&MpT|D?JO6NP1bfif4wgo zlIbL23wm44$S!iKYqoSjU$rS8E*GX?`^Kaifj^}dt`N{fjLZC>8Tu(`pn9jhMy9cW z$MuzZc_HJOzQ-3EU#KTd+VMv9qzKbfyvxX7mQ at RrwM~kEDW% z#rEM3M1eK=nxg^^Z%%bWnOJUsdP>JFs!0tv9$(UNB^jM<<&wARzSOaS?)fuLj6B4c3C+u{dHIVgRwmmV!w)52z1 at HK$)d|E`Lu+qPjYn2>xtv1AF!Fs zBW_6tnbQ*SltJIm)jzv)R=)V at bD{5)+`eg7`EI)Zd=9 at m#pGSIRh|FpnnI$TUYmNX z$Edq-%ahOVoVjOrZYL|>yT~T#9r!e9upxYX_+zL-n4W5Jzl3Oie7^4GV#8oW1aa at 9 zC|L1VLLm|4ASVGlhm}ib>DByeuC%-H0SrIo16c12S7b>neK^IDq2QFQ(4Y{={q+WK zJhCRTY2342HGD_Ukb-R9*_Z$^OZhckr7(pcnz at dZa4Y2;nOtMJOaa$@cmBTE)BT;- z&&WO`QBM3i{P|PX_dLA$>51?s+_HCcjY!@ioLrBdcb}NPM5rFDZuKaPa_dfz1WUm8 z4uon1T|0*{l~oxx+t4Y$Oie_|?Z4qSLcOuK;w*vzwBUzUf%8ZRW zdfz9LhBOZ9Hmp at E(6FNJ)xU|*(9v(sruiCO_%59!iAe{=DDu4Z6DXe*AVduz` zA3?qAx%0*p^2zRRA~ePK0ycIA@!notQ0quY2$p*g at VVUlT07oGqk5bgy9a}%uxh%H z{Z9SeNx7n&D at tqlGX6dH4xQX&43diB=RBVS7garJCos5kszo}ASeu-+P at KRTjGkF*XcIoK+Big; zckc|GNmI=r%`;k8J)l`fjkmDT$h}bf=n-}mcndY- at MDRKQ*s7f6+;Hi3hnCQgW>I? zrem~A at s}t_-I&g@=9Q_6c)>3 at m1{lZ#=$b1RsG#90BERwE2{)H(5p}KLShL18hy+kf7#3 zA!;=OOC!Rt at 6#=Q1 at lo@oH(k!q7K!cNjtnFMq$BgsQV at TPlX>c#QOZfC$ZX79xMMO zEdBgg at ddD05Q~Md_!Jg%Vex4!2IHOS&+)E=Rle0$vbSkIW3X^u2-+ng(&xrX&yK}% zSm_n8SP_dKU*MYw;{)x9haNwfgIGL-#ZRz!7>h at +m}uZmnx_G{J`O|OKdaH5P4G$K z92$e=?mz+8LgQ2b8+e(i^R?M)e9HVUbK2{R@&Gf~R$nuvS3~FJiIthF9aF|8ISe6+~MV z4p<7qn}hW|wh_PiBNi`V>GQuHqh&M($|H>_kIzP at Ur4!%Y=^V;GF9-K4tka0l=1pe zSZf2S{hrli-*W9%`Z@~By^Cp|n$X0XNrQbd0cA-XzKHAEP^@<-p*3hPO3eNF`RjxB zfAX+M-9W$xBrM|^=6>qN^nFGHx=~naCuE$S{su9J*cf*G0qm&=eSd9auhNI|9 at jW5 zjvRBpO`3)QsqI6g)Im at uLfR?U#-4D;){#P0z6O$h*L`EfjpF^f6~`)(Y(uj5RtC4V z!I=9aaol7lHJ`|)+~u4rvj4)(9PK(7wlfS%ssjC8qKZ835F|tY`h#@G<-A3c``3Sr z*W~!;BofM0 at ZP3379dOZiSFs3ZR3*-co3z3pG;mXbH${nrGXlrSY+Pc at mQE&D8ba( zY>893$cnrBJO|cxjnh z0lxy<9pS5aoyj^s=g%mrHTFpb-e7#vD)_QWu}GW^Ua{0za!R0%(87}R$K&}{r95wL zgNE3oqUYbOqh6d~R*iWJx0GotK2@?=gO`xfA)B@!omN0~Y0;rXqAB!*lU!bwB2D(! 
z&}hj{x0^oS#FdH%{1?>O^x*B!vY6H~5 at m}I3zQGJwusx>G76rU*iuQaKGMk*)-IBB z!`CA-ksf|)a;A>x;gBV<;^$0rJ$BU+=ANAxBFCjjx!MBv#8Rcm0)o71?%O5NSogQX zcCOC)DgeHNVe%XE!Pm&l;9ZQyb?*4brK%eiF?9TzHqS(nu9ElNjDKniDWIV#u{BowFycBJ?19#cg%{ zWx9>EdMEq&CZVcMs!1I77uooG(z$m|e`jfWw&RAS&j6p$O}WlaG3OMmguX9lS!j#T z$2F2rl}dt_ at 4iSy!rNf!ZOMA!3MC704j0YKq$xp^3~;=k$^q10N!&aB$%^0}& zU^MlON&r at 83@==ItH|JFaw_)&0COo@}Y5q#ea*Mz>=A$#1jni{JX%GZM?+4e=;+ zzGo=>>?Y}p8((z%%Clqen(SeFma)exMDYyy{Tm_zlev;|uxl?1YWMT+tWPF{beGZZ}fQo+ZMI|T_D z at M^Z?kUe_!R;Ivt!|FP9xN2Zm&8Nngqbey%j5OI~ZsOM}d{Ycas8t&#RCa=$9_ at 0v z6BRFsxvA9?`Qa5wbusRya!nhJcq(`4zZuWYZ}sO^8t{#~A0-n$W9{ZG`_s8(YFJlU z@{CNf2LDId;2p#^d{o?4d<@o6jvD`5Gc|ahRMTdWuxLrpU$E6zy0kM5lg>tpaS~30 zR}AQJAHQ%px#xaFHom#f4!?47va=dS6i%wGn(e9`^H~YUPL6!u_c$OrCoV*lHpPr6_Ko3_WhgVF|JM*scEihNZ2JQ=DX_eMr5iYcN(u{p0XLwPp+<_&JSNDuc?JaSbsHK zfij?Ns)`#z!r~8OzR!IqGe_BZY#bIXjQRc|M73kgRq~Q at G2ZpTt&B76aH<)%(Bpw} zafs66#}3En=0OFjvA0KI(I+wU*E!Dbm`L04SDy%2-m5_=<+ZHwPsJjp+Yie`Y72a4PjZ9vPn3+rj%F1s{<5XdAe6E zDHf-SmF3>uB==nrFR6{%l#P`c+v~DpLOzyExbbCzQZZOC#U96NQ6Yutin!*ZX4Q5F zvE(hX;beCg5;m=gW6H(jPgBId{kZrn^`n;)4}*G3wIH8_D`TU<^Y{1#8FUI!Gkve0 z-#T8KuK&H?E+;}U{7}j&Ma-UwqAV4jlOtApX8zG-n_EXj;z=GVMMKK(>Tpkf zJL(-CRdW7>a(R8h?JVJv=@&O^UNnj^$t85AS<3#Htmzo*c5!eWk}K}!73i5^(f4T3 zXK(q?tQb#N?;UtpaioAM2$!R2O(8<3N7~}DI%2ANz>k8fLn%_OM~dkYzkR6BWb#yf zh-@&u)tQ&MxWf|`{a-%zGN^V51)rQe%cnU at wdL|&CIKGewWehkbe2 at FM2zQy&@<`w z>(_(@rtH0gVa{vyD<2V|)`224KL$GcYf~dmG%%?anUd>BYXm#p^?mr`%UNI9s%BzODCim3W3fV@#f!h15^R32%&+qAa zRx5iiMnOt ze?0RH)1okU4ooJ1-{tQ4{*aoqk^{cUW5b~8pUM);6UhFAVrV8&s`Qt8u(^xdfK zdG=`@(<#Q$S<7v;F8dQYEVpbolgK}l(_2r-*XvD~IKN!Byhb>3VaDaFa^QyKgYO at U zTDoQh%^Jg<0&ekIet+rz-&n_Yxc^B at --q@9GdP(6`uja5UZ{@Z^ z&Z$!4Ug3hcua3nqUQtH5B;n%}RO|eqwQD6B>wm0wfUwuIsfcW~S`5R=+A!-ajL*bk z*5gfL*3;o&#kZzAAykI{Qmw`D`I6d;TTxoq&!%ua_Erh5Uj5-tOpO at XDyc-6bwb{j8-Ruzcl$R%{;+4i=enD}sg?d!#q+_P at dh789oSS`FDSXwU@)^3S6f(F* zcB!V*4$&w at sDG0-6igALh@^xf!^PoM9g3eCVfA7VG$J 
zs9l!do7*6rPLx~M_R{O2(k92lp39tS$ws=%e$~@%-_{+6KFjpUr!idjA!9U!FNhfuRGE)y at u z6?1#4307+^e`6vRa at -x0QfM^dg^eosp2W+*NUA5y zPHjuQqyeA6QjFL7WN&BKNB((WPsXE6lLnl4ZC16oCPGHBYkl$IF`4pLJ=VfJB_d5Sm2Bf1eAC9hM~76COf5oH!*oZz zk}jQWs)%7pDt~uYCQvBg=9qBrhe{Kv4SiZk|qc0u+p+{Mwq#)J9)}lpx@`M2;2pbs{{D&2K|0dx%s|312IEt`;PgS zkI%k?(EfaAf4+Sm2-1MiKUW69_qV8zEU1kc5C_N<{CEB3uMXd9TN_DKvqE9Ak30?Ww39*UycKIgnmC$2?ttSAsgu6h23h931Bnm;P+4DL0nVd z{0{UV=sVDJ zkRFUH#S0D&7k>ZljXC(7VO9elX4H-k_+Z*!jnCYi4+9!txWM;^3>cmksCUr+8YfPk ze=!)uPACN9+5I2 at 3~vuNP*^b1(fYukM|V6h0UJ~wsG;88eb9~;rmh|)uD==#*b5p_ zW&!d-=||~C=|t&6=>q9}P`XfhQ0Y-=AYg4CuI46aJ;3<(_fYQ}_i%OlMWDSb%pEQM z at BkKzCSbg1PhiKGEwEmo2bCU`1{Dt#2NjM^rz&$zTS-k0wN<~YCU;3m20HLpL#WGX z!_mJkhh`aVeGR#T{85ZhRfTKIq4(jcsu=$x6S#l<4xu|4=JyMU_WhUDF!PP~2*!W+ z+28(t6&U|jEPutv82>3O|BQZ&KN8DdyAR`!gL?=MpMa2vn1qy!oZ>JgjEedQ4J{o# z1LIMqW5-W0vz%mQJH^hy$#t6h3=c0Kzkr~S at L7>_qUSG&iC>hEl!8mk$jZqpC at Lwd zsH&+WG&C=1XqnoxOvjle3Gfo4bdnm$#4ab-x?_ zH=%31XCTyz@~;6A19A}vs);X(VcN$PJ-*$6EjWO2&y1dlT+#D2Gdi3c!h9p84XJ`T zK?}G);~a#d>_iF79SZ*;6g9sffK9>CvHnvW)aMc;0`Wsaz at b8TKwLN|9cG?H&6mvR zS(F()Lt3Eca#YT$U=MYaF-QAZqIEl<<6487xq&?s^w9t_+oI-E)ZFiZo&y}gyv7Xv zo1k046RqL5E>yS;I>ZE|vw?i~#YDPxIaRN1GM(eXj z$H&yn1fBCiJMp0FhH6XHF#|Q{F+-f-SZ4;v72-m-9%@9G{VLO+?P>CFTgnk#M$|~c z9OGQkWpVv0r7XHksH1}ox}FF9e$d~7zxvw|q;N-%5l3`uS%JDTgB-v)2KtDL(u&b; z54M=v9`qC@#Nm$?{ZHdd;2*}5&^7&^?e5>@ zC-T3 at 4>fAd|Cjll{gt0Hy2ns at Jwbd|^jQu{$G@|r1Ka$q at j##O|DycY&{{1ZZ*dYT??Q)>m?cen1f&Sm~y(PMjFxG}SVmSl0L2F<}AKjeMKCWm!C=Di!`M8y_gQM657t8=UtsuPJygIx zGww6xJ|8{MH-llOgZ%gT_&yWu^NEA}o at 1E#Airr0v+VQyU;y$7dq%;+`+6T191b#40`(SGfP#lhq at B|eH}{j zN5Ao+9R!J^uOpqnI6%d8*gxOEXh7++LyrJ2 at LvL*4m%gQUsle7*01rGwhO=7ixYkI zi?TAz3K&jD4|{vCOMv>rL5+h$1P}o(C7zs|96suEmLEd at KsG0{sD>UP5p<59Usr<= zbO1AYh!GiE8-ovMNM~>GAws<-f)FA?zkLMtvVntfXlCXHUDlT85#49hzJQ48uj`u2 z>Pj*QSvg3{#|>Nu9{e;gzmUj&I2 at cw>45(l;A{?c*2D`~6;l1xj`(2clS02g#{lE~ z!D(0=3z+B+j>F=3EKb1UL at Z9i;$$pt!2O#Wu^4qv31y?*K**r~|HJ$*j`NrM*RX$e 
zivRM4g1=qJzj1%Z2>iq0{Fm;}^g(|(-2c)Yq-FkpyZQe=@cGw1wf=v41mOI3|Ly0m z5rFgC;r^HI_XzmE>i((zD2IAu#-H?O*neH+{d)h+|80GL*ZcppcP{WvmG}Suo}?|k z(Wa$WC^SOBDlN99D!S@=g at Oe_8{{HxO`FgL(j-e#D9W^4)K1yP81BcEX+=P$Y<7r_ zIX1h=v`(fKnM203bEwQAc8(z;!vFI<=Q$^rCa9a+ at 3;T&m%hGzf1c-@=YBcQIZ2*` z`lz2kR$iT}R)_wgO-(3H#4d32)_jmql(|7N=ck}n|yTAK^Ee~$p zw*8?U-+%a#oj>^Dqdh;`_1NympZM{fpFH`~y+8Z;Q at y|V<0>7J7XapZt at j!oAe) zY_Yo9i1^pW?N*D`IKk+1HCbB3*WStNYWKAkE_HcqmH at Y9vB%zQ(cH$6pKjjaNVuIh@%}c;3vaSn92T+RO{>ei`dtN!ht_Lg?b3eByQ)2NkG^S620gWML6MGhDe zA7E*-_}xxqRz5T2yf&BB=5YqNZ5=q%sku>K)^?Bfv(}nFV#4~^GNc?fhtX?sEVEd} zhC`ZvbGz)Py3tweoF^x5kJaAfur~+C)9gYQGpem0=w|Q-aO&-flc8A>}zaipud6wH+;3#yifc7pG=llJeTuV`I z3iTIm^bU at yQ0)tLk6e26CJ^oxo2M<{u6iD96Om$mC0W`NVZq;bW+U^Ui z26>!wr|@}DPx7W%N>i);yy=?-okx8K*ze!wcGwr&)n5o!{dv9#m~K>mCajgT1`#hc zCvpQ at 9NG~z-vM1I$ln;jpF&!TH%U7 at YqdgTeXV zq`8%Q_O8L;{JS-`5?`a{R?;_V?iosXG`I2rihRwjovEMey4C(*C^$0)!a&Wljc^!muqe%e9?QsQ?&WA>_gs-*67Ff0qCJnR z<4q~=pz2d3Z=g03-@)Rf7XT^pAvH1DBNcA?4bHp%V!8FFC7oA&$N*GO-vk_ zILvTGRAOvlls+oaV2B+ZJG;t66;<-nO`4^xGiC*?Kc?B#&vea`ckFDy0-U#sg#TQx z%`>}ciLKc;-MN^5hMo`1^M|XmczqSkKKnA8$>nZ$YyOI#5*^J%%$62grK>{;IhPu! zhU$krDgWXrrg at z-j33gP zOkG@!`JG*z2t|#pjjgS%v9ZHC-0tOW_mJnaRWzGs!RC^g^0?k-ny9{^x}oO5!)srw zUpMpXHFKZTzp`M9+y|~+#AWv`T7so*vulytaehi<;dle7OEDa at d;|@qVxE zKD)!asM*zq6_j<+ax7f1uqwQ2QV~`K)+gImgq zr!1?1vdqbpRhmRu8`Anyd*PoNL#gHwlvArH{? 
z26uLDGz~8;pyAS`6ysLnwN0*yu)MxzX)QHIea*#nvV>Al$#6DZr9N7<$0C|k;* z)B!_(Qg5O=p;3vuA8{*XU5~P^)5>a!g8!valqKCkDQ-g}`x&Fu_8N}5NoS(&>to#6d2y5~6;MDqOu8ZwC$~qGL{Yy)++$)WQ5r?+8GO8#^HLf|AC`Iny#QU zRaYP->a<>`)+N{V#c`Bwz6yDng1nSaVE^jm@?D^Rs%6*eU;^d=v}I`D;Wm|0qcQgL z(7(pY<;}hfaaj{}qs?JKH6 at A)^{5~3)<6XoWaYe-o``&w=y<@!=67xR2B=J;HX zzh`Of7K!mXp0`8#Z9rPwo|tDx)F%$8r4#3xQbd at JLP^g1%KNdVg!Rl+-f7p)3yD)b z|8<=Eci8iR=X-Rf?1BUuX&sGuKL_)E8jToC8A$FQ-kX5_c&u0n`!*`q_aM<*BJ4eZ z>E%RIPGweahC9764gE(=FK;W+8?f_gdPx!`RR(Z`L?gnvL)vUe{j+~6{L?XqY)Ntt zZ%i!Wc~**(D6UDgtK83$(9eb|*R2icXKEXSJH~1Av1Eyon?zm$-GSF!t)8Q_KC71b z=^qgN29`Zblv%#^4ac<)#N3oq5^(KPI3}vEU zWR~#SBH7JhJAcccxyJoc6v7t7Q^CnjG9Fw-S-vhZSnr_N(%Xa1k%nWrPxBQc`0nbwko1u1Ni{y)o|S#;787 zT-tbV<`PA=>he-z{-i%zdd?pQhF;t1IRDJ_6VGR9xS2ue2*#1&# zpS=;i7F^Fm&bt-o+y?tF^t_Pk<_?^*3zjxp7x_Lev`>cim!Nq#%sMFaybIOwkacXB zwMVFbm^|~Ea*XeB8e`o}V at e;RF;b7V-g8F>)Q8+}hROX#t&_25iB`Zan;lShm?N at t zW66LvGI;!}Esk%BdM#QV5HHuoKHRhUK+GN4=y%!m>HTUNhwy)`B+(2t4Yh7Ayxtnp zepS&DEr8iV;}Pcvmor8yC!akbMxr8ErwCgE?SOTwaSfdZiiX5HA_{A?VKky7S&Mf+ zt|cGBek$Tgh?S at k_WRip<_1Fy=Ef83JJB;``*U57lUAx-7E$V|$WD#NbvIpfNG z`pB^N>AiG;F&R=PLH!nMo^0G>W at EjBGRb$&Tu=NNxT28~-2iildU+c9vQZMP6YhAp z>)_sL3h9sPeOO at v?!o5JD6)~d(i^tELgt8+q2aW7>WjGN`6}#LQIdHtV*dvsqG#(pNoeg^KkC+t z*elNE>=n1yrDS0dKGGH+NhZ!8QuZ}jPgUMEsU$NQh{1U%PK%f9J4 ziOh8pt(g-rt}k?NAL+g(WPViRUoumo*I?gMUVeVfK;=I>j`g-*mfbodDXPQBri*^6dIVALcJ;HTeX`_W`BU9Kma_cTo z2cJjW;nw&mF_eNjOex{#9r2B7J&ao}(KOiT`k*>1%Dy6j#+QCYKVDj_50{Q{RLHo( zx)*bcc%I^q|7*8mtpK|sF#bsQYT+1eo>c25Bc3wMNtA&(A)`KZP at UC3sXyLyyF~Zg zA<=9#Ps8|`?Y+vptXjBdC*k}?#nULC0oT1`T=SA>)P at Mp$2d>*qJ5h#u*Qfq#zOi* zT*y6v^i7GPzo}@M;&|wJQ*msDIB!P4d7t!5?MFEGW7v}VfH`95x?jw(m|s!vp>ym9 z-;yZq+Y+s=4;i1CXjjy0X6bOstoJ{aQ^S3CqeK_qjW$!mg}W#Cq_t9n>+z5^xZExQ z>-rM^wTAQZ+IJ<|dJpED(0T|QbCI-qpB&OAnVZojuztkJ98$O8uDPG^-q;17=gV!J zpuC4ruRi1Ebs^?jvzTi=I8S}gC}54ItzW|4r-kGpoL_`{odw^Q$O?Nuuy52~_+E?8 z+<`DxJ}lAj263MQy)eN2Y2(?3FlCQOlrO@~fc|oTJG*!|tdWP;)c)3hLKD$pzPV$6?E7HOxBU}1J*Y~ 
zp9^VW<@W~+sF&pM?+=7|E|5J!na|{F2{4DK!`Mo*$J=IY(9R4u@{t>T5VV?1*;~}(M*<*0c z*)a6pul7Rm2aWeI&-AtRK)^ky++#JGvV_!<}^=ns}DSR6Io at 0Xi zo at 3D56Y2SpHlOM#yE5G!_9~Kvcu3zRzZc at a#>&qa zhGTu5NQMoG?jiaASFN)H-%t35)A){*E3K2lr7=G-pdKTv-(JR8%uLqN616VFt^F>r zKAU6E(QU9bf%9ePXD@=*(ZTD2FQp=F*k(1n+^3R*=bR1bQ|fyT5$1n)nvQOUJ)xCD z%tQQnl6 at m6)t5r42NFzDUOeSt?B~5daxiNkqd%=T)t%C4FlqCAnx4{D8{EkO{mnN9 z`GdU_dQFha7*FvY|MS5cb9K}S%QcI0Q8xxpqJP~cmk;H_nsgMdp`#EUbE+w#e at -0b zY{7lYKuUjd?{IfwV?t59DWD!W9Im^g&_1I|G6$8_MzWS4_e33Sfwijn`3%>Qa_xFB z9PO6K^HIn=lAwK#Knkb}W$sVG+;12<_kX&3!J>%u5$Rdzr(E|AIUZrY%g&3xJY#ao zFGKI(<#846Iw{|07_bgiuCJN93?cWSB8 at O>!UwL^(X+5sfo-%p;My1d8aEhljl&u^ zR@~#nY416y at kdwcXd>+1kUkLc{iF-sk5%dDr?A~>Tm)?y#_P${sH5_W{C<7;dg_7Sp*+8z^-?|6T6J_fAnbAYwc@<> z0eH zk6^c8m*7o;4#DdMrwSGc<_IPVmWce#5b0zI_qj zPH?{962T6^+XdGPZWr7o*eCePq~LLQ-^5_|py>Zc1z!=IFUIc-!BW9|!7Ra8!84aA zJT7=xuut$w!G{Gm3EnBVOmK0U^d0cLr z$G4)P)n at Zm@W-*M_`8s{Ab*pk#fD>5vu9SB>n4{>vN{|@olyq+K&-BLcB`$b)#9N#o%rm1h1Kej_f)-BU+1z|>+DS)i(JNo zRBdzE)HI&f*V`;sIdbx;c{aK1P9JY7`GHy=4g58=R%xooW)m^dMn1d2XB7perJQnr zdj`^Nwm3KlHA_?FkNS(x?IRao#QS6 at SvbH-!t-|jS z_PgHZ@!DO^nvM#}h^n`F+fk{ls5mQnCg;lJvAaBW--;@Sg=^p$E=5(l$76G9Th0DD zz)uakox>^)H3r)1?_V4aImq_sDj at HAuTy})o49ARpx}udTX@#&Im*6*`m6w$y*l52 at Pjbz6MRt$x2Kz1d at NdL78V z at WhNvHBPI^f~5IV$=nm+)1ku5PAF`??5qbB+{8J(`EE z(wSV|fZn*6kDDyq8EBbYOQPbmQ!bI~NAV>32G^~U6wPg at saIR(3UyAIUTODX=n}mp zMw{}L7xXa>$<0yaa`1 at XqhrVHcAm>_gy{^YBp(%gxT8RsOhjZW2UX3 zhT57swUfK4$~D(%ciMe+i}D at WH25gVO?P^I7H2d3D&aRh;2f93Cc<~>v_8l)ZG~5x zbAP9+wOQQqv_vn`t`E5 at k-eczo?{F62CV zAisN2AD z8Kc$9quHfhGwFV{{ua8Nfh~yDnp_Vi_9G{2`Ri`i^7*rCX0lF1zR{)a)+$bhC%}c$ zREKw(7$0;4yVTJ_3)m%hxaWslb*7_q^ZB^aX!Luz4G}abM-K;F^>FttpQDTgto>wH zwG9gc*9t0)o`L~d8V{t=4^B0il^o9n*nRjd1O&~h*c~+X^y(CMDaKl`j2>>7q14_V&8HU zcF|0Gv&ZFyYmu at V@GcT9CkEG|x$@U6Ee>TL1!JpD{yuD$pdrK^8{#Ix!4`!_TSAV9 zywv#nTY}?P-T!R->hG`a5$CCU{p}ELb$`iC!maK<*`ek~xEs}Ugu7O_hY9x-;m%ac zCEV(hFoSTXsTmXQ!Nm$YN6KI2qtG%O635H4!yoSx$JdQkjz@(Y-y at Ef&I%vCTO99* 
zaD2Tuoa9nI(<+(s_vj&y$tk;FI||te$x4o zpIZLk>;GQ=_xdz1bqw(}C(u9Ct-dI4}`q_!@ERJt(d2EQS$V$6%%yh>J_Um634f{uJD0v3Rm4f zAaB`TeoLp!9XS6N4<3Hd7$N>u=MEZr=f8aDwSfUup2KyuD&TmZ_+6!ag1v%!1@{Q<7VHt+ zA=oXrNw7<>L(n1ED7Z*)zMxsqBv>a{D_AX9E?6cwMX*$`NHAY8OE6I|DkT212Nj+a zJT5pOcu25MaF1Y*;7-AA!7jlL!MLd4`w6%3Zxw75oG)k+tQ9O5ED|&d8U#LZoyW;M#1 at lwSq;0MnOYJ{3JMdK;&1jPjHW5w_umx zN-lqu^(`XZvSf>hF&IPd)xgb0ZIZv-9PVmT?!A_i{97Wl8}>5neV75~ zUIv>9bBJ)0gsJ1ZN3i27pAT;puiYZvm0$VCsLnI*zvZ8|C(-|ZNPK<+p=QWOBdAO_jfbVQR)X{11cq&mEH1pgDq5^1sD7_#PYcXgCI3f$zXXrF= z2FwlJ2yTIOKyL-F$tGG0T at Kzl66+A?PH at 6#^jYZq(eg8C>Vw`4_G2*gLo;1Q|Ps56!_kq7HMjoIKfzMCI zoCCcd99xQd;Pb(6z}7l6K_3FsrecgjGfl9y(9EW3 z$Q!g3eBnCO8+1QdI1_aRT{KgEkARLsGnesptsm?7eF1tB77M)>EUZUeK^K8r8<00> z=CWP=4q- at Kp=)34Id0xCwm;Iv-4K z#yXPI0&j1 at oCtjyyt5VSd5#mj(T=_ZJ;+>w`v7R>ZM{V@}ZhptKS73>^iI!3AVLv=Owy4nenqzk(g(I6?a+^i|F?_})F}2hf9{ z at m`ecyiR5%EC;$8Tn@{F?f}0BD}e3>{{Sn69sr9rqm7`MZ^5dekAbcCqc1 at -?}53Y zH-jnNC_8i-_{@VC%g}w`sjd91PEV)7^lkjzJw0WCmv7 at eXY@1$T(ph9!=abCVjF*V zOiwGp?rr?6QcpX;uI>DN2EEKjVU5r|;E0EKp8`Flfi(~DGfF-6gJX8^^;b_u at TDF6 zj8abr!N2a{{Verl_&$;6VdNQlGuZM7`Z;tfc*`UF{Wv{!f)jS4jiB?v8)5sQ8^Lew z?l9^{Q=QUu;W}7 at QokvWUi;9pz~4GKeQV>537YX^x*mq z^FbGZtv|xp1EyrD}wF at pZpbnCJAwZhkwO;mgwmS_ at ie~=g__2h-WbmK&OGbVH=^DFFnh9$f7>M z)36=T%=A9qe-`ZvUfsu^*3wfMcq6PAx)EI4$KNT_%X|RV54{z94t5B7KloSJ0Q5;P z{W;TVSU-EO_y+QMb5^=7V>^VxiZ8 at z3*C#;70A1Kv+^>0u=&}HCLuyW{LaNG-MFX%jQ3(N$)6&&^=`X_WOcsr~Sx(hr3 zYvr^+Z$J7cv=2NC>wrE2e)%P|6Lcf^46F;f54`RG`YLoC_$X`>bPqTQi^6W`YVap7 z^K&V*Blt&H5A;!R0T!HlpcjE_VZEIHfPLSXH~$uO4$b^3tPeVHUpMBGL+C%y%;m5s z=nn9 at S5Xhpb>L%%QC>bDyzTeMKXe!P3)nj7ec)l(M(C5^l}9kvpr?RKVOyac;5Jwf z^bYVf*k0%XFyjxH523TbXJPxH`@ngxqn|*p2cLi$qV==~JPON#E*e0;`XkyGdO!FY z%mh6EUiv4r7j!9j>rvz%x)b~(Z0hlcEFvm z9BAf#SROPp^%Slb&}rZ-SShp#ybD&wVZmo$)zE#Q{sW>q=qPaOXK3{iEczJbf at W?zkMp2+fT#JXM66!sE0RP8=tKNr z0G&kT&_#NQz8 at t~E%Z)MA1%>*=qPXntQC4I_#CVYdOx^|_Y#9%3&swU=pZz+05$;4 z{Bo>Br=c6c`(Ooe7^h%yoJ6J2rC?6HL}kz>a1E>udM)@a%nWTzkmwnh1G=A?DA7*n 
z4)71K0q6m6eUe0j(98+R$WuJ#Jy61pWfpW4SUv*jLNjM(Abw~Q=*^O-1KI~Zk}c6D z=-r at ultg{d%x7R(31~a;EtnBHCkOR2MxuP^mEcpbIz9)?%tidrMPPF-?_s7VD|iyt z3eCLPDA7s|3tpKg(K=`|_&eBE=tJO_$4j&ux)D5^FVQ||=8cy~bcFrEF4$4%O<=~Q z5*>%m0`G-ICE_{`UNjMP4V?$>gq1?~f~^G-nV{E#?n2}ln!iiej+^RM*5Fy#T4?4P z+ at x>fIKiL5c0%t3w-igX7kVqmd+8p7X09wjnW34NOqM8dIQjF=ggy at b{2GbUl28_K^0kNux)hB5BI1FL1-}S0 zL6?KK!{$SGfv>_Ep^t!Ta0l*#-T}s6hqi!D1WRB&94FXOCs8jn^Iq5>^k(oW*lErK z_$L at 8V{C%MW}+WL$AS}K2IvBCHY^L;1fGHAL!SlPXCa@^9pFA#BQ&#THqwS>{tea( z%^ReDeGb|kdObLy9(jN+1sB3%4YeEx)l5~>?HJQ z at Q$yc&Qnou;0t$RtU>pK(p?y9&{5!3urlZ}@L^ar^iJ at XusY~{VB*(NUTEf}Fb8x2 zcq7aQ-3ZfCk^v9m~}74IW*G*D}rX;4l9E;Y?f#a%mi%)?}oXdH-SB{4vvTQeV8kt z_kq{kk9vSE2i at JsD|81qZ42rIx)!|lL5ceL9I$jN+8vttHtaNX^)_4ww`2ZHM;`$D zVFqaCR~|xLLHocfcOd`JQ^3UUOEd+Vxg2JO?f}~#MtiXaZ+Zmv1HBU5{3znj(93)U zRsek*9M^+9LFa)tz{;Q(fg54f&^_QykD<>%uLM(fqi;Z`fu6_FAE15U`>>tRC&A`l zpbtP>!TMj}dJSy`--R84E_eoG0(KmlNzb8gXYf6RL{oo_vS*^+z_Tx)&7hg3FT$Y9 zz~!(q=#}7UST!{Bu72bbdL20KC5&I_Ja8(kgX09>fptPV4xp`HMqh+BgGXUIGW9Zl z^a}D1y&HVvcM=_9e=zS5#v{iG?uDI(W?uZN#5XpS4 at Mus*n~EMZ^Mkx$G}y8KzQi2 z;JVjQ7HDS50Qwtr8u%yJeCVU#>_4LaLz}?$uvX|zVDB5~XV6UZpHTnM%;#Y1p!b8{ zI*Rs&-UvSTCfbGl!8vcCKS2AyWpATg(4F8Q8#MEcV-kP0iH?G|{26Tl-3i9LgK-2M z3%0-tpj*Kae?eP7r-7Hl8lk6v3t+9#i at -H7H}qO?8>|C*2lyha6WVYbedJxNEufjl zV4I+s%_lIXpsnB at SPwMw9IO|b-b1-yeb8p`BiIq>v*1H#(62ZxPbDr_h85%4L4j(VYc!4WAsIu4x% z=BDcCB(xE{6&5=RbqM}_gpLZJDNRQUVAarzz;D3J(CfidFduYcx{elR=x8PMN^sm5 z9j$}T13!fAggy(tkgKCZ(EVVFQAcN?)4-{)s2ucpaMD;EB|=XDH;vO#9&|VO^rbo~ zgWd0n&ml12-1JpqXc2d!f&Qmrv5se&}lODcC{iO+`8yUaX^|&{^PD zC+lbsx)bbyC63n1JOE3BJ~)|wTSG at V(6L}4EDyQ}{0gi9dN26Yb?gf*t&`}+< z0hF#py+KET6Jb{90&o`00c`@CVLoUpxNr*U6M7MND{LKfC-`mHM(B;;)mQQJZ(P5a zWjfjo%^ZIX$_SkgPP`Ungsukv0;4f#qc7^{$FL&kJ>X&3lrg;CL;4juDu-q^S0bO# zR&WJuJ%rJ#2lt^&-p4u?_k&E{m%0&}c`s}eH1AE#`%15e zX1)R&gyy}adH?5Q(99GQ+6(u3yazPz>AVP$xYnRmc?aUaL~7(WEt z1I>FF^S-w|&`jQYb~o-*q#c>v)BX8cqq_CPb`eVH(iF?k=QI%p>Ele86@$$J#_Lo<0V95dz_ChuEw8e@~m`_3%FxM1?W 
zF*~4{ywA%?XeRI7Qigucyacumn)h$1gRRZghk1wY-}6$w7owNnkId)CVI=Rd)XAS= zb}JwE4LDCC{vpz?A^uUszi-zKfBqct9*{pr+z;~k$HK%TDH!6w31f0L| z0FDQ^QO9x}7Rae8p51XY9yBhqgP&eg?{$4pV8X2YpJrblCHYyDk?56 zrb{llgtD at -_>-%cgJOwZJ5Sr1mGCG2vDg2WaPOTzF8~qn|NIdD^FiU at KBoX8;Qvs6 z*{z0qPjRbB1ieE21H+wv;wg5Ye`5Rgr_RIi1pj9p?7y8681AF`8#t;n^%a#1 at qa=6 zLRsIy{^w4-Cu{a$1cp0hoWS9>8Hmy!-DXHL at PDES&%poO3E6*w{GTQSg*#I|1HnF; zQ(RF|yl at WsCjYOfSQy~HQ2uY25uBbO9a%qROy9oEkdEZi_`j|F_U(oYgga%FwR|)q zB)t at rjAK7jKCOHz{-(jFqH?~R-Wg4QSRR at lvYyVdBW3Z&qXm2v)hKkv`Ecn~6wepw z)z;5f(lg-y!ZUCwdd8VKL&}$Cvl_-5l=5Y4-!7NW%BiN`4Sz*LQn>WwIuxna3Y}7{ z`t$$d(DaO?v>cZbZHD#~I?bT^^Z&Gv at _oczLFpq4xsSkvUQn<42oJ{%uDYdsM zeaPQ8!=)!;^`|#Y{qOIaA?c;dZM2O?fSQ&_iBrwkruEH`^it$Lfx*5|O;3yhIlXCf ziX)^){<%e at r;fT)Vh)Iup4{gb%qhMhOnNEBTHhQ}J}wO>IU!8>l-VF$dehH-C~9v; znEFgvc;FB?r?rFi;)(&(6;*O05p zOszPJL(BtjLU<2Z;aU!t1Aj)4WAx#O;vO%b?Sik7R)FmYVWeiLHsq%rXL8Du;8%t7 z?KrX&dKP>;aLfpEiSwP%9D6&Caf}{0440r8J`U|%4rv!*Tjg{8h29`)Bs50Vt6)W`Yr&XAS14skq)IAZ0S0X|2$BM5Gv*w3|n z61DbekZZz*8sYN0QR7?(O6>=?j76?Z<%m*m?Ko>43#}8a zykDa$k#A+-ttL}Ys?|uvBXY!Twjv}2+$~BEAdW1QV+oy;(^&JF>s`s`_vCyIM$G4Z zR2h)Z=@IMWlkbT`mZjnc!8XMQqGMsJ05p_g1BUV1EMUI=*3i+5n9?u2) zqF1g5Wu&*jRUpUdFHt4VW)(VPe7cz+^5~a)5$f`LsLLe!i=Mx`?3H_f1N<`D#e+Co zXkN>2c9Mk^5a}8UelTg5$tc5!$!(pJU(pAVS|#>s0CY zd{t~?6t$=jm9#cr`;>> z74<1&o2-OGNUyEAowqm?Zuod?Uuj1wHs1YkncZP)v3a$Ffrph1+mMku+cKNO=-~g? 
zjJ0^DJD0hZ+B{>8?e>ah-YDprv5PGZuWjtLWtT+=>`(l%$f=iI7L?br%d|qnU3QsT UpVyiyOcmAD(`QYaKVOLd2Rp@=@&Et; diff --git a/distutils2/command/wininst-10.0.exe b/distutils2/command/wininst-10.0.exe new file mode 100644 index 0000000000000000000000000000000000000000..8ac6e19b8eeaf7642387123c749f416251c496ea GIT binary patch literal 190464 zc%1CLeSB2K)i{3lCCL&Nc98 at kM2HevaM4B=G+~1-A-hB+;l?GqL`VXPbh|~R;a;GX zK;q5X+>G1Or_u-eXp2?*<7w$r`cxpk6m|*C2BgZ{gD6xZruD9yVw4ybh}`cvGj}%$ zDo>y9_w#%CfHXU?2+X71eb2R3sCj^m8*S5=N{S(CTA{;_Y at FMlNOq2-T0`k0XS;KO!?wg`C1tt{7`Ip>F*OyC;xYBmrnl*u7@`7Jl#aE!P9?)>yBS+J^gEXZ8`lL z3OnJnk*5FQw2fYyPQOU6PtxoC-+9Q7)!? zFpHb at Dg;*e7w8!k at E&f{I2}z-UOC=CiMF%DfXPi15Mfi-)P??fJM`Q(yeTkny<@+g zrd_w|x!h~eeEVMg at V)^&x9Ptg{#N1P^#WW!8LwvuVOx#EVe&X`*{$^}mJ7>)3@>SI z|2155hZhX>I&WoQTs~5<3(~KH>;5mng>S82Q~wa$)3!pJxh%Mz_;PvIXzKs}`+wjU z3=3mgP0a^|k&PF>B^o-orB3ma_A1ijC89alZ)t7>AP=cA`SuOmUcxor#R-oEmU at -G zZ*v^!t*#FC3w553bWY7)bXy50yUbET6~7VSJZko0hzN$oF-0z`VD|U|X=;^KI$(~Q zJOLftR#*4C%vN!te-G54W-kS3slmz#nS-Dm=}X<>Q*}%1VP|a(A|>^hJzgJ~QwYHJ z{?{O5xJWdK8Bv!Bho#q#*s7w*>-DIjS$$h}4FEot;16jP#aKncAWl%lJXN#;gfdoD zRj3Hi+!MPDKvIGiM(_-q__vD0JWF#oEz%s&t5tc>-mq{ht0tfyUW+)Dg70GBey6>J zi*3{LX3)H`XW$WlgiMSx(72GRk5|Q9Rm at Sdolq%h7{Dl=_o>;X at LVKji6h{?3=vxD zRmCiC!03gD1rPz0z<6>>eOtP$iWV%p5G$VsmB+u~^?Cz(=m;KQJhIEAzD-;vM~65b z;jsDsEPz8|=K&4u;kw!-2N{(Mp(jk(>t3HFxCI+q7;<$6I9{0%fPQzGWmhM0WjSTn zVdBaO6#AdGW9Oejpik}nyj*j_t^Cyupbfdwfm|S(Ij9OZ^ZpHp*hB;~^2)!Laa?t^ zqeB=AiKBSGAF(k)TL77UfZ>%pmjDFhp1}b4p#bsm_WmN#iv5 at aZSg2Ku7v(OV0J@` zXF`kXNQc&*rXDA7$McOJ0Mq;#It!>k%5&tPaz$046+Y5nl|y~-5L!_9v2CuIR9bEO zC1so(?1V_e{-z at SzO;1tPp9BX4xNBdFxtcManeh at aL?I~dAutfm6;oIkWZJ8@&Wr_ z>XbGh)WLQVJb>lqW{@>E8!i}jVS!B!{yv>v-qj8Fmf%}Bd)&p}XE0iV?Qn0|=b0ph zZw;9<zI}-)7<7`Vn2%y at w!%%v-zNS(Y8Mn!kBv+i*-xo&|z%VO`$(w)WX*Jy7|pv zF|qeqAo$9c2t}39_OoJOgv`Lf^imHJAk>^j>kJ-2FiO_=d}1OIs7>B-1|D0jPQc-! 
zEqI1z-h~aTRQ~G=9Y+Tr%VTNA5vT^SOcKH{WbBj*Iq_v^_XI7MR=<+(r=86nl{+pc zQwAGIXq#;Nv3-J9-Gv=&*)z|~Lok|G*W|Da>Mq2Z4{*w}mr)iVvrf~X_f?nxPF<5# zsqPBXYTp|`n4DnP(mW1$O8OJRaY8-`A}cs~5tLEnp^Io} zdFs>`sv3)Yk!tkJPLdv4pK3;h(H142wvL%gDflMqB64(L>q6+HgJOalip2EiUm}O3 z4-DsDB$R0kopR_5B_n0BefLbTb!zlkt&4m5j`UIP`0dV>(ezQ7QioppL?53T zFontoDtTr8Ba{h>GsQFnTiB9LTm$a7v1PQ|xe#RfztzFaR7+*R^DX^28wnY_ at +-(y z4U>dY_2oGL3pu)y>LB(|T1-kbe~c+q3)AhV{S{DmI%K^Y#8dQx^J3}IgRDdGX*$k8sM zIGLo12|ft|D2!Ca0i}Nxz{Ed*X4R#HnR%EnljbykRsvHWOxNOU|I>iAnw`gH-UfG-q8Hs$v%g9M*f&F%(pIyi;JS`}jU zSCki at ZSQBJMVevh6k?5en at Bw{#`5{wz!NFQ8eAbCOxvj6;QGtwJ8A!gA*4 zTcLyi=ha4e0AR6dDC{~d3TD2Y6KKWlTET at WoZ>H0`k?kYNFT4g9|XMFEAQ%pAqG;H zhNoz#(6E<=JrJtdPlJx*=xCV0`#bL#-c-Oh_B)__D2RFaP_Ua_6n5!imow~gf?bZ_ z#s4~#e!ss+5pP at a`mdDFg3?o{6K?PQJfLBu)9OqNfx%}rdn-^owu;VMJxQ!g zNqP^0__OH1qO1ozTZc at 0@?V1ANGp;8Az;_aN(lVyJa_7IeA(U^@(F7~!he z2hF*i4!#+9 at hf+vO0EW|SzH%B&wzna&BpucVf^gzl3DqX(dQ3CmQZjjVtUb^cPEr3 z|7kNkMT1X6h?G=MvUFZ)`!2}o8&EC)Reb7kn81R+!`XIL8J6POq0X8HHTrw+K+H~( zS$PE%G(^h+R^E<-58!w|l-q~`O1SYW67|VMlxPb>f7(8DJ|RIwFFC6m(Iz+sWZOSD zp>@K9Mm~cnt&MLlWnmtEbC_bkm at sO~;sX?}{{^jVrE zdXo?~7Ky#W_lv|HK`0Ux;W2+H_JQvH_INLB)O1 zFfDhGHW#VFZuKuMqz&5%ZCp^8BvK)ZY=hew?Dq9RB83M at cVLiSFAvgd2NDS>^pFne z(Pn9WfZn`rX|ALX8*90SI(}YYCyoSICd82wq8KY-(x`TZkvG8 zpyhVjAyMTZ$_aFTc=xuzyy8`AYN$CwC%TQ_APqaod$#wHRvEyl_1ZSVKEJ+jy*PgY(q=%H2*TC0s<2zvc!FZ0OO*=RtM zS<7IM2R^4`BG5+%0SGBFYpE99L!SdMcwXsBpT{YWf`Uvswu!_DFi; zdh%(=t>cwPR-l%Y;b<48c|4dZ*Cf>JEUHq0!1bzkVvg(_Y)}R%PqgIfXapL at st8TZ z#S~XxYKWMJN(?$4ki%1hK^p-BgX02)$5Vw#i9vepVaF-KQREsB?kIBg3AYuw62c5e zuQ1K;WMiAMS}hO at r~`u<1VJ^}uX<>BRUe7Q4N|8%sLgHVt};zQgS4Oac;uh}1t{<9 zhV@}Ur*&d*m5Tbxuzn1pu0XpoxSHOnyQub%?9iZpNbF#@{_916NjNTx1`=})Y4T${>XOCSJ$Cp4xSNj;ktpC+rhyd;DU4E^yqUSK(()=TI zC@~yh+5=#ckbqKY`;Y}f^KapmA3R8XOOErx18uJZ66B=(9F}K;DWk=#N5#4yc|j|K z at d9l!?+dwS at W}+q6Exo2vb11^^7(nVQ(jn(Ds`rR2Quhch^kJ)Q}u%yN*LQF-p`Fe&yydT2_|TQnyCYqk(3e> zS}an{UV)<~y at e)fAk4fR#t4vvKlm<_bXI?E5%vm$o0UhkYE&nvOt6P*m7+mp 
zjl+-y`^04hp=4&csWjx3E03TYG(q2%F^^YS8SAMs;n6a|S0+pxV>8Us#gg{)Ct)ze2x(KTzOb zJfv+yb)~ppt6dm_HJ1Vyul$X9eFC?lJFj*Xle5BIq&HyP=L86_c+US?NShdzPN^V( zry|(Y&rqsQ7*^U0TH3#|(vXZ{Q1YC$O7CKgkdhquNtJ z1(T04H}%1JQM3&8*r9xRTWT0!8FCTGNTn7}rPcuHMPHp+&LJI3>X-p5&EZU7s0vPL z&JjSH6YuoWx;G%rd!NTd!e<7!P}%58!;$*~Mqt3~1(feZ(+fwaR@``d(pGKi$-3Gl zmUMwxKeo-adTn#fXf{d8O`(~3`!RXEJT~bD>!E(D>vib2dfYqnI$_NH`G!p*c9aL3e-dT;ByIqZEgur?t z9L-;fLcJqYhMuO`#{oD3V3~nT3Z{xLrz(M3%L+E=@JG1GH>3JFptr{O8fCYcpbJeD>{OZm0%x1uht{S)-}J zY=t3f3BHIdsmy^f at jQwk6wO&UH`GHaogh5G3D`Iv+!SMF3B`uW$FejuZYk-e%}$Yy z%xYl-)X3yl3|PcTe;x=H{}brE zaNb;^=sYQ&YwEFHEh9alkO_*Z^k2{EaTv+L0lJ1 at _A8V>s3eh}Gcpx|2_y9OY}C@uA=8eODeJq*!qm{{1X=V-SC+ff)(zX8 at A3NA$&hbduCvKTYQJk4UH135{_ ziw*U#R9^G*X$=$W{pg8Q4lkzKT9nGgG#$n7z7T!ly;M!3ZuV>w%T>TC=Ru`Kp}q?V z2!nyn2wvX*^Hik#BV5x1fuZF$qOmZXhUzX#X-dcx`I-*`cwKEW7PEnrrX9RQxv4Ta zn|sk^6EFfdtN{C)IWB0+NN=!T96 at JemN(dMc{;3lknDiSn+Gcj4|5+Rf z14&OG8q(T~G!93c71T(jam8?UgxR@*Ez*HVI)}RT126(^*^hla2S=4K9kjM54XD~# z_g=y4n=2g=OK=U`Mif-_(A18tfJZt!fm^Ltjn21UD*%D!q4c3e;7ZClBPE`j_3lDfS z5P2wwAAN^@>NhL}3VM|F52E&Ll at fY!GV?5IZpE268n8XdF0gu(iHoQm#u`l1hU8E_ zq`YIYA8h>zwxQ+eQ78p)4eT6Sw+wwxms%5Re0|gfh*~SnTjro&d5Ad$^U}Mh{rQDCRJ$0Gv=^se=x0QOEe!dW&A}o47lHccF=8o=sD+3Z z(2mm9MU-Dwq}E83RYUHV!9DMvF{dP%AEydlnX)EXAlR{3j*?XkL=8jz{yfB(MXHOD zv&vMuSmQSjCj5PI%85;v4H)}){T#(^l7QLrQp+=RvBu+1$4L!m61vd97B1AcK^{%; z_?6<4|!98lWEu?mtqT!iuNpTK%Xtr?9({#LgcsM&z1OO_vU2 zv2|;zTJLxA8bty+&%T$7(L+U-Ow??SsT6?bv&AJTx(!8Bi}Pui?L*9b{tbhu?ecL~ z_3SE&&H4IN&-!UZGeiIe>a0TJJkW*d%FV2op(%=IIkwKpIz#zE-Bkr at 5B_3W%)iwS zwlbGlShSTLLtD9bMlp4e{102{)mk~MfhWM?vWG#1ai%;sGww+_Lx!8ZA-dUsS3bTE zk!1IKhTQ-5K4e<+T~{&fqpPoG+B&c18lfwE=m>rYtyyNr{H`ojeV*=WrP)hFlTW5* zs-^iaxjvzejX>=IZh*ecp6h)!VMIhhunaY%HMkznj<(_ zM}xRF-+dSQzwP1H7Ai}hqT4l48lJm)#s3sQzVuQX7Cmf<=Ido9Yc+~V>@p^sE2*sa zhwnlLD6L{+K at VmZF6b_xdVB%x%#$)Dw6z8 zGZd^m;njT0A1zkTRge3fNs1(67?AR_a at -5BiyHVRm3M)emc-pu z7qY94s4x~-E2Rv1nUF7{y&ztv9C;syeh<}&=nUcWv9$#z?a?(bb(ra4)TVLouvuZy 
zR}gfwNn!XZzQG=D_vhU`xLm)P?PO5YE^7KM9^@xY?V(PkxqiF{Em>3QQ at W39JJad| z+~A5`Ql*co;`ZSxv(oEIG4F6!b%?r?`6#_SO%0g!Y~YTA%+jCGkD9j5ESid_3wDj3 zHWW(y#bAle!%C#86RHDD{0nze-6T7g&a8QxW+WZcg?A5a5a at 3Occ)y`fuW92#h$DR zw|tpEVLh~^&nRwmoN;H|mdMTm at O#)1&ns#b3X+E-LSFODIwDoDKCD>K4 zkDo^O1$$<)$p11Tr at Zt?avj^hlSbTu4%Sot7w<+r>9+e*rR?1bFKg4$)xDs zKvP)ul=%JKyp#*ogBx6weP9g)mhiGGq3oCgbV=AxLq|F;Tbhx at yz>J!KINx#*alO_ zc}sH>#I{Uw2Qt0NhBx2vbkn+=P;#)+FnB)$hUi`U1tR-C}JWg zs=_#r|Mk1S44Sh4R578fhWVqqwr8~5 zlu6rT4J^S7Z#uMfPyNAmuQO1?E3c=Dur&WCI)^&aIrnRgpTdZJleAxZaK at 8fd|{pR zaDtN_9^iZ-S1%vXdqWKaQCHs#X9*{Zy~??>$;l$y&q18x<+ZtDPX!`G8U})@Fro2o zPRIt4({nf7pfiMUKBZwkPT$8#hwWq?TXlkYuO9^466OhLn}$BHbBnIB$n6KgoHuv^ znQ{=ljdG}mhJ7 at o9>)+m1(yipDl7eEB_&*CQWtl|Zo{d8tTy at rhJB?axN>pq{I7-4 z=*#vOz;kRmh$H_DxKme~l)w2w&FQa3hxsSCr9~VC21ynqdSiNk3rth&^Qk|*n_vIQHhQtxRs31R)R at h3&k;q!*cQhO$8k96tzO683;*cl8%ti6N#O8hEQ z_yt$u`gcErQFNeKM2-2~g-+ZQyo**!{kh6VtEor~<6DwWRrla)>ID%rSt zgBZ$m7cMeuEXI8)XAaHzqg2juUS-E+np5zoL1gsUAHST7qq!C*a|ws8O7C%EdSR;6 z$zJ8~MOBRl*xCx at SS}?D!cvtw`QwYO%6^JwzgU^fZt^PkUjjJWRd5F=+|d-AS!Gl& z!xpG?BoWHDMfBeC)D at iL*uueD6O)M0P!nrtd>liE;Yc=`c;&qhpdjf_7^{&$n8MZp z)LmUP#Ekyc&LJ91R(#^H#Im7@)_~JL(^-|lGNfvD+f z at MXHrwmp?Yy~DrkOL+nYW$FiX7dASsu=wG)L8hzs{*bjsz%uvGGm(wXF6lWmwnG*y5h?dCYka(Td}3yXTfvJry%nLTbU)e9d3A4 zw9-!UBUkyfB_G;NKFzt7<<)i!3qV>QU3Xo)G_*j} zDqO#IJ^R8fPMXp;nDPiqnOw?J5`!s^vXsCqO!2 at Vjk;c+Nv$@4R at _PV4IBW4S$3Vl zCHJT+oQD|Jbjvk8a?Poz>jZrZID19`F2gSwveuh^?^TAdI_ofuMlxqx0WO?dQ}g|6 zC_g;@!=)wMUbM=+>e~UnnkBx9-cTj+7xW_ at o|qc0RLS%B50p~qR|X4v6&H1t&J=ur zrz^W2e;WhUVs-xQ^f~)kG<0a(3&>!Qu=Ps`$S8qKKU>pk8CDKYWr)Y$2(f+vf at BIc z`u*q<^Y~W`%aZBu&A?cm#kS$ozOqufY at g25_q2(}q<7WeF)>eRJIB;qbb2T`^g(=F z!BvLX%ebc~ZJ(27y(;4k$|km0wov(yYI~wvF|e39 at o?iDtdtXOhbq(d6_#GT;Yqk2 z1Xuw!$y$1qYDm4)$}iu8ZhHZDEhv^RP>QhO%(k~Yf0)VK44&yeNg14>?c5l$qS0x` znG{&#H6)j3FS>BNDIbZ4&U5haA~Fn04X?8OU99c}tj!S>M<;jD%y7YU(BMZY#N^9s zh{P-Cg(gQ7=V>PCgbbw?(2ds=&B0{x`sbhRNA+H}(W1`U){ib1FWu$kz_(T1ZfbBx 
zg~?7KH<0encOgK*cQnI7H2L-=N^t|8Z{G>HD&tSld8Zmi0TTiqQe&l_S`ZOeoRLqg zO at GI?4JmM7i?I at l7i<2Y56n}Y`}&2OQr_Of-& zH-o2zk(S*$L+ at O}mHWicU!hq)g{+^FsQuW5Ps#q+FVOpYd1p3A%_llhy=s3AA0YKS z;3B(BO>ul~TY|pg-D3=&S$0?I3|Ht?uU!q=)u>(5v}?L{&CsqU?K(ocW@^`w+SROG zM`_p5+SP(r!xbrFg^7M0YF8chV|%eN7y3zbAKsA90OJd4xrtn|1n)t!@U*2l%uza6 zp7;rESVS at YzuE#RZcR2qpcoG89MVC%$9{}_imjpz`jlKEQBamgb^wR4m%l)J{LDu# z+K*9a^3H|xmOUMo-|v^J2C&cYVbA-Kb5`IT!v&i-5PCMJ7XU-ex6{slr`}|H2SUB= zombDQI?k~lZudJP5Ju{trrMG1OEgcW6}X91nQCG|mA+&*BxVi7w3WHon`@O)gNgHA zIP!{$Dp_v-euFUHvio*>@03p?ipg%=g at 7f{HDWhF=6QSpBl)b(@&s;gfYKr#ry=g? zh706K%I+!Qro3a%dakS--#w1RbMPL at ni8>R1p$9=Ev6m z4GN~3#2R?b+SKv^OYlB?->Sh>Uq-u7PrGn>gNe2`RXOX-Yhqx4K|4V9ixK3ax+Tau z>U7x^;VT|^RUzpkRb!c(3*j|HsoH*6YW%rnj*0 at xqQHI zfzsztBnJIHrIVOLWi_lfPT9 at Rl~pI{1NY0JNk{DFEm*XDG-Nu8{r(QZ9zkM2KFE~C zGQFeG0W)>lvqCh}|6xt3YFMWK+iFVH!y0$ZnkZY++MvuBWy at rR|6OhWK zvz+#$9{LgZ61$Q%I)wpa^lPGloOx7?&=RE%BQg~-(_M_2qJzvtS at iE0j*ml*=)K{f zQ%dA*_*7fG3eoL^O0!BmplneI6O?)twVvTnwQ%3>GV7`Z3wblNg~rXoIk@*|gITy` zcmcN!2c~hRB(V5vn78pv1o|$nI$d*#fYA;;h5O|v(&}r5N2U)(k}5MOszzKs!*rKP zX}Ac9+FF1U7vIW^gtvM~&V8W;*I$;)Pmn{laP>U`>Jw zpfFukAC&h`x2==5tqa-~pG48_(P?N~_LP5?M7rx5NJEQ-KLe70s$A6raYg0kH8w;5 zDqxrbiclfOkA?W5aR{RkDFG#>+5k+`L*BBLA8;JCJc)zHc2urPII6;HM at f6b6H({~ zi>S~)bq^v|t^nb_Ny^XI4(aeQ;VAF3NSgs}dU+poz}(g`^VRZ1%XsngxW{p5O||Wi zq5KR3wUsLXy=En}4uEZM%T>K`13b*Xr!{Lf(YZuG5Vq^O|`@_h({ zB%TkI9Qryasi9BbGJahbTsh%7fSM0?x|Z>4(_8P+&u_6fYEG=V)6sCm@+a>tiYSB{z#r+il&bQn#D&!Y#GEx({JFYeG-WoZ0|%qFhC#K56&gP66WJ) zL-*-qQ)r=1E(3l$VS7W0q{%Rn%a6e5GLfo*DOD!hTTKxiZa6H1fp4ypy at j&7SaTkL zq_7`z91 at n2xw(%0LKQlq`CPd&ORhA at l{s$tdj(R<0qAQr)KZWJW$eU`m#s2ylzDU1 znZ-52BcCVp(`0^z%ooUfq0AS{Wvk>eUM^c;57*YT2d*rpf`;vc1?7DiS(6*=7ph5v zIjDZ$Bt2~6F1#Nf0}6Y>=mf(_QVSXk(npq{Dr8EkX+wdeS~l5;dnO(;P_!{$7{#k= zrm3EMY2{2 at EOePXvfCtY%vYwP$cr1G#-!W_-P>b7?e#g at g>{XEw~6&qLsp@<3M at s( zdC?fS5`Rdp%_*wQ6EmakytAT{lWV6H)#i&CQFs35 at aR7b4G!qkwbN8 at 9!byzWXLxV 
zw}FV2H|Bcfx|zykDrsdGE}WQs6gD58YjdG?z~)j48}>Q~zK>y2>Qw4j*RV8sp>gww z6HhlOrq<~9&C;?$PFhxsFA>j>E?PI`6TUb!GgG>fx9)nMU7dgYBXUSyUA%BggZ21F zy7h(ja2;(s&^+o~unn2Vxo8A>ot*r(NAPJe^dXm22G`|E8)t%UDc(pb^1+xs-WAaC z>bhyF4^JDCk=$aslOB;|4;Q(oiDM8BC57xRkliz6?=)MTmAI!lPSxj-g=XH?S*fm_ zsmz|lw)?Cs;No*_0E#DQtTo6r17yBg6$fnXJoKf(UBD}rNw_RA0MNEm!VQq at fkZ5$ z{{~uyTw4I;LtXKS#5+yrE-3OAh(;_K<{pqCX@*X at Vg_(o()2P(Ju^J^W69iLg6kHKvbZ?-fa#2c$PqKG$Ig0I7abk|5RJ(_ODK?Z}UkkrBy)EH9uJMh?6636AkvCv|Fhl~AsZ at hmUm)%I zmSU5DuO)#@2|rfC#s1TR<>W`=M)C&C8`396DS|DTB_k);Rp13cOLk`|&!593853xH z7miCG>ZLbE06y~E0_>bg_S3OPp(ID=+C!}H9awmkg}$l?k5A*H+mfWpW|{z0;TToA z45x-rMXt3rUN%{RRk#bko$CUVSVMU|7Nd@)O^0S@(k4nDfaJU2w&D`8a6?OtLWu`> z at +~znBwyMExoLizKf|gq$cv51@^3*Oh^fUD!>hH$^)c3G zr1j-MeQ~UBOaJit3`6Ss+30JvoYgmuywUUl+EQ|viB#m#8s)j>Yc=tvtExorFv8Of zi9s$&Fr&203iC+Xkksa0O!b21mjKPiaRi2^s}CnGOY>p+T3{Z!!*HJrnQN6 at r_e3h zN}saGJvp}hFn=13IF7DeVe5=P#M=0uqyC#V%C$~L)~su_sp9jYZHl?*l3-sYC;%Wj z44swA>FcRzNS2X>2X=@9n00I_D{=KrcO`VLf$46iw9yIF$O7!7l};c-x<&>QWPY5X ze!+|e07c%Yn=6CHP?0CQy1?R?NGfyXs#C#Z!Wf{IqcU&J2*9MpI7W42V?cZB19h9O zQ;ve#fUHqgK;H;e`14fU0D9Pwa|>e2GLq8c{;#hT$Lt&AcpG(XbVklv4Ts3_$j7G0 z2c{|e4G}88at!TEa&%$Ndfwn33+8g zcA0n=BOU)(QvCyEF&)na1qHN2$-{X8u!yGP(VgO$NL(*{Vu;-i)kV{R%{eXNI86O1 z?2=t){(7Vs727Wyor<1wu$LT>WG5D2CpL^=orrbIZf7)oG)_9rua3ZaEmYco_ST1@ z;x}nnOQOitM+FOU^(JqbjCpbdzMD=NQtVMij6>+;h;{YqTs=T|7`id*h$JylkNFzB zM>AqhPNQ0`ds!r&MxUK{B+3qn8Zg~4`z$i0ZKjB+Rx0%Jq&)p!8j zZ%5JfOWAb>Mv>Mvy-;B6ERGV|4m|BO_M9$>`wd@*JDB5C9H`Wb~G!r)$31T68y8az~CGV99|*q7td zlj;*bvhgq7b%UkZkd95FO-qSpI6Fh at 6X&)4#hx^`KVLo0wXkxp4T^2K2^YCEMP^HOQ+*|S*R2X(JME8MIr045VSA`g_EWE*))_6SyY53((Av|)XLcZ{gQERueWkAU_9%{z0P7&pfYSPvW0uIgdT5H-<${_2qd z!{R`5m~D~R-fIcs+89W*$RmA_TGa=$v1NCs;gIG_IR!ilx*}A1{A0rrzyaz;u=re7 zzhq3-53wI;I9PU8|W1Y#~MBhWK)qrM27zC*7E{mpT zQ)-q$m*4G3tjT8$dDU>$2u3R6Aa-mlt3LzOAf3n0+7dtro!5Chu6vTA2~)+BofDBa z#!-E9F>&)>(MvUkFZI&mt9pt5axeYQdxyWOcmAgT-|8LyYkNocHN7)PaC8C1uE%;i zf#lF1ad>B}&C%R@ zNyN}e6~lMINd;KrGePSgVQ^2g9kuO`8*QjwRXFAHQxI(-M?rgZ^?-TcbppX1RlRG` 
ziwESMUe#+mWOMZZAA&UjikIsYjQ&=mNp~gR zL at n=a(J?U+8$kRwngHbAhgeuMb=TBuSS!m-y17|8cdqQ}22xIErzvzx8?77O#ZUqTS`F%@s(x2?{qn{U$9W$9{jvFWnw zgfJs`TA0RMJwqt(`-oEh_V=i5BF`^?GSEC3EnOTbj)=NU_Ec*ngmiJV zV6-2Mr at ewZ`H0te;oXVHq_a-LQA3r<;4PMYGlF5!2z8~ybYv7qNeyQ>Jck^Qe}Pt? ziS37p_qiobu6wDId$h*My$An-S|?Wl|EfACcMks2GADP_awk^<|0m)9bNJu&pp$zE z{=bEP-a}4qFZ}=OVJFuPM%+R8Z}vO6zrlY*0Lp>?XYkMdu9JHc{tH(*xyA7R4gA>= z?xtYB0HX at se$z=sK3H})5hpr2O+5M$k5*T!Yjc at LQCgWx&xcL=_)@E1_OK%XM%B7p zN3Sr7)R>FhCSjCxSEev3=?H}faS~cL$IjrJZdMPkbe^H!CFE6O;EVX%WWH7TFi!h3 zGufZ<2yss%4OT~D?Ks;h-tfK$eRB_>3BkN_pxVsz+1 at gAdMcH95G`+247@}%Vj94<*E9mHg`JzV56uf5yjlP((9+(D{Ltyxy- zqJE;<^pOs?eC~d?DT*!<$NQpTZrsv*jPgK6!1#deC^_VGxXkr`u3I9HPWhXuEAU-Q z^MByLc|6x~$C7uq87{9-9# zs-Hv_7Lwl9`GwLsQ%h;K350lwys%JSS1esIt at -HwkjoTW=a;_0 at pOR?`fQC6B2>Hb z4S1YB&!{u#=9#3sjAWh({lq3p^2uaPmPc8u&_O8AGs$bRs-qbr8Rdriu!mAmDjX$_ zl-3t>Vj9eVHSI$wSXqgO)8Y04OS1*Xg=ltMur%Z94XMp_yl-i~$h<<1sHOQF5 at KUM zOm?_=bkNUz1ce^VB2h5|hV&u2M>0|L_)HkJ_YL}qsY9GXCIE62^(`>Jcl;pmWMFhP zE%N+JL>%yVm8ocm%v%upDXOWE?G?N__Sb(a^wZZT;@!Hbwsu=2lF)~msVWOvESbk& z9mkV-t4KrQ>gZKLRme_a$E#~*`jnS5*g;`H+Kn^i)wzQ-Jvd0yY(~@3(MFu{kZGY` z^eKL%Ft;-p8!<^2En*rebH=ZeF6M|MNee14Hh9+C50XZlT_asqL!_p?4L{YQEjvHO zU9~TpRDs&uNL;xd+4ZtLjC-^#drtXCd*nlFz%;VGI at ECH{BP|C8RIN8cIia at N+owuWzc>a8ljVGo(Yu1>Ssy0&9vXYp`#M1$48TtguU$q|td$)WiRZ+LfSndu|zVmq8 zQ1f!g{LAT8FOzp8AL?yyNDVJ at q^3>2UX0t1h00W#;n}$U=*PjM;-%m*VMOQwb;>)2 zcY!reORjDDSJ0)iQn$c$1{x~nI$Td%n%}4IgaA^|0_3l`to5X7yJNrQ$r*S{+dt(E zz{FAhl%+Wbm#i9ga-u=5+DXcHgo=NRhgI#gAG39q7$V2dLb+?E1OFhWqLR1OvM0xO z92g}_3hU>pt{t*#D^TBX3;^r9 at D=7GxDfs#kh{ob5=S*Pyoi*xNUu>P6{w9-NN$U? 
zoud0P8AocKZ81MVMz>aNZ&}o&BcsXNwxf|tdfD|9nAlBvLYoo$E}n^xodV=_LdP%) z$M3 at i2-hc@2JJ#!ZM*dFPLABw;)4F$Nx&!|FhX4*~E6K)7Mu#CfuvhhF{-k4^%DIqzXX4vh{nwrkzBmNgA`|_bw~1W4X}E}n^iaa73us7R_nlfwLnjT3X;?_ZnKYb1 z!)Y|kr(qrqb7`1E!z>!^q%|&P_0n(|4Oh_6Ps3F-TunoPhU;laUp1aeUqznUOv4r$ zK1IXLG<=$dTWLslflu8=!xw2tUy+?UjfVL&qz460ZKh!(4L8tmJq-mKuBPEC8v1Ft zf`-dzxRi#AX}E}nJPjAnu#ASKG<4Fin1+QkoJqq122U|8qhToxoir?_VId7?(vYWN z9u4WczEe#!%%Nc}4LKTS(a=gmGfSm)ZKdI68sY)?hll(^CF+v)(2MkQU7;2EpgXjd z=9 at g^6j~&U@=X`^<3f~>-I^)w*Uf%Ic&quKP{|_2_(*Nu at s2!2GWV%hu52U`a>4$F zq1SLx`c##ARr_1^4r#x!=>txuYZ7y(;zc-)7ahn&JaDv8JWz{Y0BR>C1GC$OJDS77 zC$m2kzS-O at d;}&oL}f$N8Hh at Us1xy@)z&ebORhjZxO~Ab19&-n7+Bwj-%23mRLuH*9aiE!P*(5~$CW=Je#^zWT8~uk?{c`n$ye#ffRLk#jljLXR48czL zpMd|Aqpj&J8Le(z%Y0p{TMxyy%-2gD2Hf-kg&$589=c}`mO|<9xVNYtp?;l$+RR|2 z4up}SI!tXDT80lUrSLC?|4jJLfPX&xbK#$bZ7BUww_N(;Zn!FNRp8nK*B-d`!nGH! zeQ at oAYXYtbxDLQ|AY=?7n?Ua-lGISK(d*&g4F9bd<1*FO9c7!JK>t=D1z$6*u! zawK6x!fgYg4MhW5ALI}A!Kg3Jzcc{#4m^(aUU&y6z2kA(-ejL#WPM`8KH(mxeS*N7 ztq{zF|5Et3x(!o~w#+w3=P30)q15}&Aw?r at 8q8o{okJ%vrSlNT%|qY-Xt2-Dq3a%? zEl+qspbsy5CtTX#ABKM?{Exu at 6#Tp3fBtdWH at QITT)5`IH3zO)aLt0N6|Ppen&E1O zs|l_qxEkSV9NssKM&=FB-|)|ef8mB9ed9I&)f0d?(py*$Oto8kZ**k z?K at 0%G>64;O_%5>8=V|wX|ayGI6S6ax at 4@s?ZP|K2L{1xI|@t;>>JaG{iGA*jRBVp zPz5H3pwEGrLi+jNLcy5~?qh_53hLS+G-`-I-DV#yHse>u$cB8AmvN3q7}JWmKz0;q zyz~jlcok)o;Rp(*{ZiEU?^GNqWc~nv-3a0(=ifo~<^(gkhE(w_V2}7H{34ABIusXV zj6}%Ky3j$38$sRy6vd_Sn_9=gSOjBoPRn at K9$~r$^EU=lh}$F#!dMDOfdD!K8~%?L z%aU5wsFv|?wJxcxYnK7Sh{beLd`Y^7F at fwF&S#*2H=Fx~n=kCA7+;51K>2{2DrKcS z9`r$YXz}T_=VX#&6j~Ib{(O5(!DBd7c1$5%4X2+fOP)NdIGh|;)6M{Tve+9kD%f*! 
zL!aC*;J6@;RW>XDvu9kbT%f at 4=SW9E^2yoaw>*WmH{pyQ8GDpQ~f0W?01 z%rlaaj_9WJ4PzZ|)mx-Q#)gTTOidTn$DBW1J4U%F39g-ztgM|6lawpLD_icR=Mz|h z at 8hfQu0CGbnT+`hjTzvTHq=ov)o|@#7*_!qV)()9{oZ&SwQR&!ARN-!s!)J(%tKs$Q at 1TP=qrSdE8xy7TBp z8(gCRtMx~z#A39?@Em!sQpk!EZUh7ju0AcxGoT5l%T=alNP at g;yEw7N6t|Fcd;+WQ zT|ji=y8$jx?D5f~<>d5W`1>QJaIq9UAg4>mRW0q!`|zL=;xcgM%pTMVwDIa>3)^d)X;wg;9PXW^R#PH4Z>I+Azr*zb6>{?lg zoT6s0r=JH%fJz z#h`CB^kN~d9xMcUsEaQ4W`WRyP`}{_g!!8e;n^#syxY?{$rswFlU~^Y*>xMo0}$ozzi&ry3p;B);f89Jlc5q7BO8a&(X)z8ZX;K14I&6 z77U1%g;;GuXK8lRn|#ZU=jiaAW%1_5%LT%Ss4I(ZvACl(qj~d4SCFI9S)*XG?0rLu z6~wP^`oI8{3I(D2e%W~OdWciux0}Q~`aMen9-|KAiZ3Bm1D4-cWf_jjRRh at k*1galo3x+r%OYT7`q0X1%c7=3_SY}pbvbWq2Sm3C<%#tViE zH3P%?5rjlMCkaz|s9jS&yQ$?n^KSWcYUg%f at xQ@_YK3k1TEY1h^C= z%m&K83`>;eU|xELxCWCf2XIza4lhVIte|^_7lf0}EszA&%~CGm3#=R1sc1P~WeR>@ zvR8X;R5|X#({9phQz#%fC*~;QR{L=!Hbo`Ld)tJSqz#N})r>LDfHIa1M-rB2f;yA~f4NIL~ zURfxxZ*i`v at F{J0Fy(4pK<8Cnt!Eo$T?t3KrFkXR2Ec#*9*vE!qeXaq$`}DpwF9dv zp`;7!aFr`-jE;`_Y^9+zh13rUVd}dH=U<^vkEJiIrw0JB!bj-Xr(1bt<@f0J9w at KF zm@#L*?Si2p0hKJo=mrxWPVGtrfFlPU)xP#HrlPWfG3d!uNnI!dQ&PuZMcnlR%f6(1r|MK<`ube~)1{0Fi5a_${*J zclm?@%r_T5&i8vf)2(WtIxwetrED&NDa2_ at _(I0He5y?7>2U~pE52H-d~Y7o;${?1 zlRyXY at CuA0)`YP|H3I^kI;|KG6#5RoIz*4_3Yh(k`${-vIrLs2Lz#%3hyn$Nph@}Q z3${7DJL;lGX)7bBBdc3k2XO5Ge+f at n!a}xTA-N*mZtXIIRhMqpz7Hj zkY|se)uDajXTuk%x2}H&@Pzw{`ZBcoT3LO5e;Dg~y^(gB-0(EaI>^4y+QW|XP&pp* zqpX3#J^X<^BDYe3Xc`Xk2@}JFgfO&nmCWE z&r+ta^bv#U|AE{7(?Z5z7eL6atok7T<6&np at W z?6W5+p`MwG2Oj?sPe;yph06ov(&%XiE9HuGQvS3&TCTu-i|}YdXJUmB{$}`R%C*zv z+8J_fo?M$R*XGEzxmbotuFZZcqsA|qs>qsYCKb1TV;1vY%ZRf6<#U3 z)8P*#x-;Q#bV6VXxwi3x`JTFsouGa%E{5%Kr}w+I>4c0?`RL|O8?_sESV190H7|>! 
zifY;r14JqQK*;qJA9B46+Ej~H!XZ4v$900o6g9h9i^X$y0#LH6i-+Jn49;NiE(Rwt zIKe9$zmEbWS9a&g-u$AP9U=^ko#JTR#_7%>?tIyuo2+rikQx_%WsN2OOpWGbjXAP6 zx2WbttZ}Be)Ak)C1qceUfrbw>?Y+KO0LqO4i+}K{UqB2GO7C`~mWc!U$*N z29r;@QO|h2f4sqs zP2zL}y&gfcz#6}pCUs4Ybfp`NAqLn+0iF4cLYDHxT`7z<-HxNETP>>T7N_`>FE-%R zjUOVyFKGLe0~>KSO}EdhY-*sCxmvjVW;KpBkVJTvSv8vvL$0hrbw#Nqbl$1?s+8ksbSq(oN3I4Nx9m4_asXM{DCF?axRf#I0Pvn+!-Xv-au`pI)>g*i>j z%qU!OMK+BVH2o2po#lOYNZxWh;Z?er)i74eLy^3+>Pz55{8N>o1^Ff$u)D<&nRswy zq&haiz;UVAjAX3Qko1G}VZ?OwV5Sh<~#+9oYrF!T4eY`rzdZVVjbDQOLN4+PjE}AkT(U-n0ZZ&Va*|^-T%njD zhMiKZYR1^`@nW=fP7Gf-GNqFon9B94s-v?$lSHbY2DQ+b-+l|~G(j#NyQ=lo_AqUI z9c|gl!c at y1nn7E3upP8g;qP9h=-YtRE7(eC(mg5mhqgK*&}0`hIg2)#efz2kzgl;2 zP-3OzwEs6Z%;l7|>^;b&NMaC4DbGMWUj>pZWc{rGaoaTup&+Bb4C0gCQlYU~%J=U~ zHFm9y<3>i~rTuQRBoulT?nylKHK9EAJ9H)bB|Px-mpf>`1;gJ at gA(G`vHe>kc|`$Y z(`{PPN;8KBV-0ErTo|;1((kYd7L)?q^u at IMY(>2zRrA2jjDbc$)lOj)Edpj%9x}Fj zm64aw>Hv-3OM88&L4hxJ#ZM4)r7gacnjGkdG=20PThMjU(J`KgSyKfD<= z5Vnhuy3Ecp*UJSM1M5y&e&1ld;5OrSF5G9`&_~`EOa#<0R|0nsCMYMVmgaqQ53jlD z1Ki{WQ?mX9*3^X!>;`PxAA1(BGD||SR`77T+raeEY#pSK+C&zR*JT^=}24U z$KR%^QI=AM->Q}IotGgJpJ(T5Ue%q@@$nAK^TR}{L)msD6YiZeNnumX(AeuF#ne>z z*S~_4&ft~h=jh=M8XM%`ad7BgRP3jf&pyVcDx~cMj$1IO03BgZuJZ6hAemFfV1G!IPrH1_FFSwxw&f_fQU10CtJ)tNseu7n%E%a^0$ zy~@1h$)md#c at -0u*sB2wKg=BldHr&2Fo`#NmG>S*$YkC1FLb1HjSYQx*d6-AdYP*% zfyaUH_*0gClj9D9pd6zP;rrZVgS(- at I@;iF*5J^(LckmdjgA|2ZGrMA)TiDdyZV$B z*f24H?d>zP!?dNA4`90iUSv(rTt1}f4`S1$s2{MwMy)$C*r2jy$)?}$RYpC?nogna z90Z!OEZOQ?fUB1$TMaW@!+^QEemiY=Z?fS%UuyV6-(U?NF|^_N$%db?tBNT`_qmdt zw5J;eNDcJ#FLIT~mt#-=LU#2itFhkbke==dZJ<5vE|gupAl&I&(KWs5ngXR1v)PeS zv$4Zmy+%f=sLFDQZ>zh}C!kD3z}@uK(NoH3`mhZbHlv{pIeO)bWf*}cuAnDJIY%G2 zCm-Kmh6iqCJd7XlKvw+MIxIyxkb{Sk{R2<=TY>&EPv|wg(hodfKZM-5a!oJ3GbZ-Z z*UbjxVhM(Qbl2s+apQYf>GVp(^(dx1U zXa6taz6Lys>S}m5yGa(Z$t;pUkN{CpsqrTog2V+(fDJ(jE(Ugq7|^Qex?)9`;inRk zbapY5VTx__wY1fWX#H*Vt%7I;5`sw(s^O;^fnv1L-gU7?L9&pL`OZ0acC(4Jw$Jl@ z&zI-P?9BZ=_uO;OJ@?*o&!ObE4^IA+DcXgQd})936iR;8;N;Jkq8$v$r}ig5)`!Xa 
zplRAnNWL$p)lwQ!NUMtzz&tlg>reg%NM2LLXdAo}sej306%DNU4J(LE@?@BnzY#}x z>qmfOnWh%hjnvJ>x(&jm6H691$1pbSV8gmZ`K)=> zKh$JJm|dS)Kdf%l>^eswENH%2WbEUjsW=cn7xWe^u!;F{fmyvizMv7s`Ve(kd_mXG zt#{O?)s2{2XDf6zFaIWdwe#8)6Xt3jbMT-zlIK*%L0;&|y7aMg_y#hD287BjjX z7dHNe4wu9WwGtigQc~Q*MqD)}ZzUm2s6)fUQ}HZP*!UxguR?i$y+3)=;_g1 at 0bv8` z{S}7LV3V4J8HxzAPGKWj<_+D3;V8ZcbrX0*2)D}A;`FJHM$q8+1z7%;aaevbyl(Q} z4ef)OTHWWj$_>NsYq;C8J1okP`-P2TsOK!y;M;`2rSy_41kl%Gi4edsQX(!W5wD=G z+KU`>WftNPwbg6;cq5*sB}2MaZ1!8iS?Wr=+-DXxpjNTmH%{0nV=j~w{0Xvg-2*jFomhp|z>MK-L;gQh$Lw6QFpXRriqj(Iv5QE1$?6VA36JFNI8aP1WTnQ!6r2%Uf}^ ztFA1s;yNY{pAB-{cI0~DGcgt5 z{mrGxCR3%QB)w88JyEHaI4Xwhu?4D*9O0*;MZEInkr!)#kt7~VW+jy6Z6~< zaux&w{Z%{@))yRBTy3~7iaxDCtelGWA*zS702=<-1LTs`V%onS037CKKkejx1ctPj zP}GFfii=_1!$hQQAjG=8s96EU!{RJjK?$(ROtq>L=x(K>ex#aKH%w2OrKdo6by$}e z%lnr6ClqS;>;aT=&p at O8ro)Oe?_j}j at iS!sQ{OL|KG`gug+->YvsFHGx3F<74X+Xe z`^@D+;ID|iGevyILOxR>J|v$h73auj?i2z+g0Ev93qNlh=UgZ|%_}o%fOQA~4Y*ev zSoMOMRA=L_$$@9=Zl{zWT!u`Uh-rFx9rJG&x+hs#nm+&idCI6W{W~I%-U|N^v15Z-D)c9G z&t#yYEPQn={MT{erQs2=@Snzo&jxx|_vz9x^#fgM2Z)$85_Pa(gE2y93cx+T?uG6! z_Gi$*l`duF9o0%Zl!^$5<}BxLbZf}jBwfPZzAVOi- at MRdnp|-Lg(haGhZOPyXf#`O z3Hy0}(w}`dDZ1VRM&+o>M0ZI4px()ZDu$@Q$7*O9;UcZE9%23O5M4<(G>L1bTq+g; zV1QYW;M8Xg)Q-*DTU1cmyK)5Z`pKE%vidZ2fvs++?y{k6$gcV%wbWK;hR=W@@Z3e;>MM^5M+I z%3kC;16y#{%;RONeow;qc at C0ydW^H4Fa8l*eEJKGCS6C{Z1%Waw%}=LI2$=WdWxNq zNvGK3xyLfDrFf3J(?G2_+MZ2j|F{HCv17_2=iSq{OBrm}B{9UjJ(BOAmD0cvJ3TNt z!3-Qd#NylaoyN8$?!>Zx-bGE+OFPxBL3{OIk?SM1IexD`{^)7di-V7);`e1gec9N? 
zKmRU&(|4i=(aoekkQ^3EkmVjsIq8E^46HxrkN>uQ`=DsM8$+Lb5t}?HaQDDKhubJZ z^myzh1?M%PsJ z;v%p#g5x?WZE^J_5V}FqO8wQB=o-gbM9E<&QHkpezk2a})sy7iH1Yh^C;T_nCwx!!-+pkO>aCbz=D(=k`rlM< z{hsPqesCTV;9LL*E$_S}fYJX231IT?A%U*Ho(Bmw%y47g`H^7zZ;)X79wagHh&T_3X$tG9XmWT;>*}O28#Ks;tOY6b&M^g#w&i(5Wgl#UHo1rjG z32NdK#NAKPJ?b~kR&hk|w0J>IYgv6}eY&|W%UtIuoGX2^Xz_RMv(Ppg2%BCt;yk-@ zDXhdb>riyJ;|W{vu94KM9&vWBy4bigSLV at SoxjHhYt#hwxCJlFuMBf{4eHlwi}Pdg zqTpU}jJhx9eab(uex5MrjPvZO-!ERYME$JdyztbXFmc)1j%U+BUAhoGbh3oeG2sQx zTP|6Q`O%XJH(QZzl18%Q7sk>Yg*1zJBDpVN`Kh_WD<>`8EiPPm;SRqdMO0mF&Or|) zCky3>ZB>Sfx(Z&knSI73ELu<=V8ScymXFK4`hs-c at +)zprpehOUZTFAv+pKlL6@)% zTDc%a*ygB9Hw)Vqq|`ao1zm-~gVHCS_8h#jD at qMkXqI2 zDU0R1g6BIfUZnQq^!@8BfN$r=E3+(B4rkAbj5vV5bbh>Q6!izjRYg|W<04PwNDk-8 zm3Hh}9#s9Mvv1XP*vMvT;(OoIMCWnoVfEv%Y=MG~ugK!%W%QRPe(5~Ed^FXcgZ9*W zon0%1xacqN(#L{``mTCB=lz^^=SOREoXsmT)Y4N*fG!>@uWKgwL6eTovnvWz=(!39 zbYW;J^b>?o(zkiiNBfgDuSiqzB*y_=9l8Mbttz1QQzfu26Y5aWRDCpvdDK7<^C%0@ zHkR6 at o(+9=F1ntqvYDh56uMtONT-BIl^~+0;&7dCkTzn%rQR^!Junx02B>yFA61!- z>Pr`T*gvQ7+c0C~zK)gqJnT4rvk95~>d|-#&PHa&>w>q8Bwg_Er%WFiAN~ptS6jmG z`qO=P;EKq86COd?)E-(OuG8mQ zR}WRwgl*;`B`uunVNX7d&3AP`1=3JhusbkAIJGW$mQraNbHe&tji^BMK7?PA0yv#+3Fo~gH>*{1`dk1-Y~CM zDy?^)gZC*n%sVGtU+?aN_sedW*C$<6 at 7B?i>J9UBDMfbmS_`D!<-5aI3)>2HVcTpy zTnuk8jA!?Sr at -4ecsm!q4&Hj*E^?k+{uMXgOoq0G za0(t1hzWX|+-!A9 zw&h*%8YD1Q1em-WY8p%u3Hkv_V_$wEreKFU6|hXr%!m;}#t>WyFRW?6>zzd=_6i_j`%@6N^BH)Q!?fAFLQs>9Mz>gf9>VOlRpY$culc4*d?J at 8uZay*S1j_8a-2+~1g zz6r`v^Is>Yw!0Niho&tZg3N at Twh5>#vyBqQ5Y!;K<)Dz__=_R}$w#O_c32&6PhAsY~<@4MgAGU?v z{|L;|u8sn!Q%Z6(6S|1G91VhAt-4OAHg$+nddxkK-Sz@*!ZfM(Fx7Po=LeWWFg+@` zv!p&fdiMdSkzE9|)Wli=&=ArE$bNE%fCqYwIAI)+KdEl~B3ZmAoTU8*#kES|fh72! 
z>wt$R8y<^uD~0N0_`d~$s=W|Yy##_Ds4RrXkKOQCc?Ue!ErW<3L0I(z5LUek!m6v6 zY1?pBq3y!wP0|+NiL=ok5ne7I8DC#@vbNHpRXc8yzLGx?#PMn)UKUSPM<^q;l~%3V zI$Ju4Gq6P*f_4UqXU&$n7cO2>(exHx9L#CUd3T<=JDOlb-AGE%$OaTqBAqM`p^}1s zsGhM0I}@+61N~MCUp34tQyU2{Qnk8U{1<;d^FT?~o2AHYAvB9`tR*H` zfF3)#r<;`2WBANy$LE;C_{?v^=hXf1tlW?hp`X+f^fRWFeiHZ4&!r9YbJg4M^V^My zUEO%g*0iOQeyv*={Aw=SmQ(m0>;Rf>NzCm=K4vy;#>g$5Fd$kI+u=FHYHDanJZ5k? zItNvP?T}@2fL at QmtDbQfyI=z3gfX%Q*1F|5YqX&|XE#mu`>#chv6#RF|13Pqflc za1Z at F+dw~mcpH9xyD`{>BDl+{m0A_;6dnmteTYyc$S`hpucCG0OQ~Ja5H46HK?uQe zB8Edhh~OM?8iDQRs6#0PBy+pb=pW+z%$?;fVt?6%XQ^ChfqC!@qMK$|$%e9bJN68Q z;x*-__yj08`OP~=LMM3q3GC;5X7LQ at kj1gHkpX+myJM3~e1YfyphE8$ocJ+J9Mzy| z;Y!U-c?}*Ounh-sKNKkhMfN}_cvTgl`sgrWH>c2+i4s+mWQBtR^7{k)SNa-oc6xFu zKYwXeDh`w1T at VpK2^q9Mot*_78X_kLx}%dn)9-`AC?*_}9I)8Xk`2qVGhx-Lc9<2)&flgEn7HoppK zolW9JZV!9=MZ+XP02NoFF)-OI-2;=2hyAtyCKflVm`s;ecr-tx^>|n{M3xu2^?VFx zzq_9HmoOeLHJ9NZ8;Wk|jZ#zHYy_67jKKh(i57t!fNeA%i0b=!dyAA*Y{+JeyRTu2 z;_9p{7*h-%Vw##U9|I3gvFzr-zsBGTH$Tq{9|m(hu$F}BbbC>yVgt<)5 z_4=%5(P6Z-U!8gk3HC!YH(gNCCJiOYt8U?q;bi`n8JBOF)vx;&8aXMC1M{R{%0eq< zvKgVfQrEOuIpv44Iz8On at 7+t23MkDU*vKV{X;A7 at rC84`ck`$byPiyj|WY>`YQz z#{kWv2u^*KmF;`ZfKnV9a^T*r!C2HER+}Bc;l*D5p(X5YcAo zn_!N#>~4%CSeh-6;=_9nqN0=*NVxR3y1uEqM|Gv&*N~jE+v3V}F37A-(!``Q;RH*U zT$P?^C=^tkG7SKC%y+;|K|_r-k0P)`==F}T at RrrOnesoA!#Bn^eLz8 at zf;fdcP zb27YOdo5DdeS4=^O0(%X{@%Ul>fKU00gVt!BbEG2H))Rgr%6Nsd zv30=-y9esbJMBi21Wk&HQaAqsP2O>G;Pu1ilC;x2mj{XT2=wKl3$}NokL!><0Ro~nj4s at Yonj}YXz`HYGywI) z<%nKAVy&(AOrclTSJ>H#Xfffp(UQ^7>Mv{0ynk8IFIRn5=JlI=HF=+yVKJ5iqd_Yv z)@q4q86#a=1569eDwW#QcC`hq=)e^8OK3<=yXuO>c$PNP^3^6aq7}WR9@%uOoBa{y z`ka>0hy12L*UU4U${^@BFX435Wr+-1K;BZSOV?V1`ifalOl=FoATKy&Qd?_3K_;ZQ zY-wf2e|(Zf{6iiujwu1mr~+e$U)WOEK~-XZmmjH+lvaa z>ers7#?DseI&!S_ev?vchyE#pXlSg0Is)@keCvNICr_b0xtKk?-AC${w`PV7&dJ1{XUV(gyaQE?(} z6s?>HZ!qPV_z4mkElme{zE+Ny#VZ+dT0R1{ox%}-W=GP(a_}g+h7D&rU5=HB{)-oe)AUKGMa$M*MFJw(q27+oZrVPXp1JhIVq(n# z)?@ukD};!{i{vv`E&p08`BFYJY*m^+729V0%l=MWttHMxVLq+g**d!6l1945#2gOV 
zlu6p2`#BT{VNL9uU<6I%LH20>eMN|t}&EtI7x^Pr?8)3B9oFt?kagHYX_9F^k0`!94lp3d?~T>Kd+}O4=<{wr!-Wh z&r;`)cXYRRACcQl3WO=}32|A`xXk{zEQrfYX^=A2IpZ at iKY6up$1AV!dnjB4?-(?G$@W12w*tVKKZi%U0j&d|O#^7{D{VQ@`q6+5__f2YEYjL$TCrAayuhP2p2Q)i8EP zGz!XSLnhd`$-Ap&-1=k?OWW=}Sktj4VtH3f4;Dk*c4W at Z*0tkuTBpNLW=`vNQ}9zW zz9&x4X${91X`|vv-?R%^V7I;t86~jMdmE6AT3V5LN{PFoeDmP+f?3w86t#VNLDu7I z&jQdlDIVaOk5hJJoEDIf{mD!GQ{>8Q6R=Gbm>YM|O2d;0A4`B+-rIoO($WghQvh+t zRwXUU89hw1bG9f6?0($qF^=vQH_7W}nk2zLp4qQJIiYwa at SJ06@yzG}2}2WEaSZi= zzCjGN%)=>$2)Um$+704BLT*)WkE4swfulqhdx0)a5+P?2A*aU(*lU!b+d0?M$KE{ z4M3B!&Z?}lDeLTwpEyz)wRMh{UV7=x!p~9Cv_s;Y_jX}MR0MDo-Xe8t>+!b4P7{UM zNHd^&$>QkFR951Eohh#tWM^^PW~Ein=!8Y>E)(5t;fwIRhPpr^3P6RR{N8VlXpqp2A0&%Tvi^D=GF)Xy->O$mVp=>xw> zUuoBZy^sm&b9)V^<8)5&tKZ{ZM;U}eDA2h#7VU*hW$fl>k^9dy67wZfw*w$U)TR9qA zf}Kzd^6k96KcIK at F??!)G>o>iX}4v|J)>3)t(lWH#-Yv24yVdzN3F8sXSO!4qs|ew zG at DFC&i9u$M>oLszlU6s zInFN4>_85RpQ+516BSig`4p+snNP+5CjsGK=+QGy at EtgzwhyU9O!lz9{R#4K#ze|2 zpGv7A8zg1d_Dj}ubNgqo28Rkx{vckyg{4&)Y6ndRR;7&Dj*FbWK51-ABEBLQ(KAj# zL6a+bMVFxn6rLjYos%wzPtpZR!bj!4UQ`R}0>QUGGQ0=#CWBN-cT1c5Z?a?$*hVFL z2VQnJ4)aLfX{E~M?32>eCiYe at DXMSAyWvu2UPHn5WnbW;4;?S*u1>ijcPcs=UH*?+ zot?v*8X}~u)kE~|!@C=Aw7B(*h+&q;gNt|<1KOiOfkD^fg-;x4+KeoZo89^XRCsVY zZqPntRaV#w7BFEwS>vnvr-|P>(4?>G(*k>iKr?Bg6vIf_8$w&KamA#(fO9$8J-lyk zt|E14|69ZRCY)1Wj21C|AZ(i^Y-lD%EQ%m0YW3GQvS<8 at xN8~G;Qx_|+t_MCiHz64hnHn!5Rm)3zh4%oe_bWC6A zi;b~iYO!xBA7yT~>J{9cTWYQKmFRbw83v)1FSyfNMe+#YP9kk}OxY+{~ z)Mk9$Nrhp4<{^o|p{~bYi5 at lA*)2}+DwCO?bU#mv zqdn}OxbHU2RE|?<+X+5}qH9bveuIVHe4V{j*={^7;QtE5iUpQSUh zCOxAUes)_;QOzSgfBTm>#!cv&p$$#^Zhrksery&WMUwSsJ-_{I^c+m>iY2gKM+a~5 zYO{L&u~x_Y at +o5gOz> z at l;f^3JAQV`1Z^{K;lXpNWrs|b?M5wWzFkR*?30tdc62Iqovq~%m-M~%!{MQLuMQ= z#Y{s6YiE~W0yH$?vZF_)2FbP=u?+9}%B5^w5Aq`;2Rf2us~~+7R10uG at u#x+{vZoy z8yZ_)t`(@~V++*?dk(X<5iaNY(s3uq%>F!vY;qtG-QeLV2O(i7QwjEHD4?s>K%8h>&T&PjM*IDi)! 
z)#CAJchk+n&tt;kEBV*o_}4`K^*;Za!oM2%S0Vp;jepJLU(fQd68`l#|GIrDb`u7y z#{fXrw(RI^w$2BG>q2jw465_#!pq8^kHfi__fi#RjIKo2q2sJeylfdNayzGz@$2+j zod%#?zR at H>Azz-2D`YIKpUqe5Cv#L- z9b$;9OPXMkK{Z0(gv~V5__FYK%VDhN1D}IF+WCi#J_v#j8o{!np{C3iZ<7o_FIdTkOIa4rzr8-!`7 at 8XIQl}#NeDy~QO}sSyL%nTFl%LFu zj-&@KjE$t+fg|Zd8bzaV6#c1Z at F>b_C=4gF3c3%!cuBv>A+(QI%9#8#?zGsn8D^4_ zRtac(h;B0oZ&(+GC8fgFSY at -J;1AQor8>=T#rGMPNH3^&+58E0u1H^fsAf|C4ZeObD`%!F3er><9lhSTxif2qscAdIC*D!`8s{;9d8D&h9Paf`Dpw;Q z(`G|+$YA4=*oMh(09R_sGQa6ISQiVA?$(x@)de>BtS;sT4~lv6S+h7!J_{>m_V+KM zavtkAPI7#Bg#5|5utnaRpp+~NC(C4pUMIB;;^G<53b+h+&QiPn at RpLK6k9#)*)7Oh7G-g*W`OGGh5el6 z{R?B9r6QDvJIQum=5A=r=c55hMYvZG at tgfNwq+y$qfM<}h59$O!i(7dc0s7u~WJR5;dM?9n6#nw*+sP}%REkJ<`KndPi zX2 at 3eJ2WU0evj9X3^n))pPp{=C>3_5K96E(NTY*uyGfgpTR%=3DMtzg8BKT8^js~j zb)$-bWD7e{Q9v32kt=*PJxj#9a8yH_3|wL?TC@_p}XwbDl-R)op}y9TY0$Ek2) z=jFIzGsCyQf at j62*vILl$lry$9Ni*#wbj<2=lM=)N^n5*r4|37r)(-)8hUhEZY}`{Mcg!CU zJfGPp7`SQWd=MJvphHe&U5VNfzAdUluVAr0&Wy=6xnzTxTL6EKlX at km@sqDBNkAs4D z;E>D(Mx)i?%1P1C@{2|+QTr7#$wU-y6>TA<_Q4Yw;WKE7eoi^6QK97gSuqR>X!l!f(~LVzoMb zvlJf7tM12Y!mub-Z7TIo*iPyUq2 at khp|KsUum2~uqg8tX^{ocj;VDx;@+wa(>!VIa z?dmaj3K}WXg16&7wia~LcD782!kbfWfCJOj^WlOb1g{M13Q}E-f45qW>nlo(4D1Ry zYRpBQQ4&5B4IYkU(U9m?f+7~U-$4!wf=l~|B3^{Fs at 4yfhHk-vXd_)UDZvs9DN4Gp zA<5G0Pl6)u#(>~t1eP6ngiKUvbUSRGs)1DOT06 z5=fxTC=uSE_A6$8l7R_8J_5Vy5pL6e2LhNN1V^F151~wc7ONwNHxHk}(2ZRU4;cREJItS8S?=k;-F*Si9S*lmOWk5)3wZ2PR2W9>WxC-w9u(QXat1C0d|` zN^>ZW(=TLv998vV<&{{C+|qLNTb!2`74zAB at GIXn2jw0jtSB`kTK2Zk9-v}m3Y4FJLb=_Np`AD+%qTBO5eM zL{&V2E>KaU6m`XFCeIOqXj80awRwgRyo}xwmI%QtL}A3Y at M$!A^x{D@@`!F)gn&*v zSPGRV1n@>*jngRvmT`W_xWP zPJHY3&fxPZd+m1!L*D7~a%Z131giOOWNdNcbXLPOYqXNKJ3kb4r>?L;^* zLb7pq#6*^ds#8&}`kSBkpA%Hs5B;=j9GVuD9VwZi=D)?(WhDpb5Uc>mlI{Ky zca31h!EOzs)!=2Mq=pFN_z+TPP13GZ979q+&G%!zOfz=P<}Ov6*q^@U7ERe}$d-89 z53z;4WbheJL<0Y7$l>BBAF>9#$os-tg8-5gTERl8kEijK3T|d<>z0E2_aHR9j6KW< zS$Ik!-DsSUNR#yIeYkpLrC%W8{?nBt!HKJ$@YQO?F<9n6Q=#gd)aa_m&c at MuIIWDO 
zF|2+w!wLpoVeMc)PUA}^xpLW8u+sLSRcYLFbzBGRXfHJSvBoWU0ag>gdE%rocaO&%6)MtAqerNAqR9Jl*6= z2U&kt*I8t9|JxQ5}UHKjo at c}EUZ^@AiC}rW?AUuz8S7bnD>fOzT4P~9_c^l7Cf_43A at r`la9b3MY3UPMgQMe=o`K$= z)h_2ru~_cY#iG21di$)p4T&}=tFj<#mS`)Sic-RI2F%pTB`%C}*H!eeZ)|AG+X(RJ z86s}hMH$eA8lBi%*x%mgs24NfqwRf;_HhQ_EbQAz-uh!P4PL5jrzC{ypupzl5q?slKqM}J+XXr zbQK=&C38wu(aw|{YLf7Vb+sIsDvqs$e4}9fkDDdZ2WoRpBIap~By_i=l*dT9 at P2>bdSlrr#2Z$?g^n at CYf z!V0a|BRap7FGDlli~BtyIB;E_cUqB7kx}#V_87DKMyZxO+C-q zEMCL*!_eB&hPN$0Z==hVUgn_}aV|UXVH6Hr^`r7qffNyf4M1X0(rD=5dr&cc%kxyU zctpu9AoNSaA?hZI;sp?|IC^|y&kvioxMEgD2V2;{1yp<?Oz=ya| zMx2*Dxj)7dJ|kY(mA6;3&Z-0%w=`mD>i%f--tGgJw9fPD8Ow~#eYy0M&ffh1(SUBs zyi9rokL09?n1*Hl=n=9DSPrdooeCYnw#>E8QqlnYf+Xo=EiSQc6y&)$e63!cki#5_ zv4vrNv|~a!@CL<70C`nH_s80-1ZRt|AqXW-W`(xD#t#(5ud7`fk;XJ zc*4#NU&O~#Z)`lJs!8?MPn)BudM%sPt0OB at o{q#+jKa7+~AE8bjTOP9S#^6*CxcqOp|zA z$>jNI%#28gjhQB~Ny+5cS0rTluk&HU?lwkD&)QoH#)@;bi92DW9M_O6Do4CDTtklIxa)qDB5esLv3C6WFaY1-4!TAZEg!jaeA at UD{43SQW z3 at NWGnpX&1pvRG6Ig#NgY9!+5epTv42Zuk#_5q=}*@I?dp!SGvAkYQT2rpZ0irMpr zVX3y{G+SIJ>M{%Chv(Ax;hA%&e|Ubdf#ZP>&(;kFxo6_=#Pe;wllWLO9}dsQ;$b+< zj}3ovpXHO~&>=iAyNGBo7 zu;v64dlYkBTn^0T>u at ud)MKj5F#YuJ(ZYfl-LRqGU~Lv%blad3W=SsT>Tz)1ETvsE~{x! 
zSw=}|ZxT!h)RpMC=07a0 at jInOFR5n4fd(Zy{?ih7 at Dls|Ia#|K$;;CCdb8Y!8fSBP zfb+J45dMo+q+=~PjZIonU+nE% z?5)>$Gel-U&PoBgx2C`(CKnbtUH0Xt`mL1lle{kW=6WK{yraNovRO%6jEA}Q=z6>= zA*^4Q0ZE-ISZtGSKnFnV-YS&I!c&E92TsqtZj4pDu&~~7!y)ko;f=k*wu6nI*izmt zD78x|CE-!R8wUy&*u^1*!Zur_bDy*i(qLQ~RxvWF>6kXK!k8KE7#U5A9 at C*7LEh}Sn0 at Z3&ohz0}spt%+DAU5pUiOcN<4(<@H at JRgLiL-W@lIM$ywrG$ z3>EG9Ftf-IG`JOsgXu4a^r?G)|5|)m4Xz-uYQReZ|n(kUx zZ{9m@c>ZBP1M{Zl>be`Z(LsbS?Ydh@#nv;Dye$THepI zPi9lFP;{Sey(eLbvd1R*xxbf%Xv?*k!OBEVFPm-s=q?~Uo zC9(gcu`)A>4m8^>4KMSb3fs#3--az^{?mHK?YPoSR$lL3a)GjoA~Op>SM>SAtmRXZkUpXit^ z+GhU?C}evQZWCs(#&=^-?1fh)GkO>rGwBEfkjeh=EOhl9)@Dw& z8}2EQ&s+vDH02!yFrboW&c-tvwG}~+wF4TV%{1F%{wDpIzU|nG-vMOgqz#sW$NVYm zXE6hpu;RTFrR5Yhg`ltMMH^Ilp`bZ&xK7-G#o`enITe((5_29|kPQXeNpt&<*Om`jFF-Z`Lozi*T(bF!wO|;ZB zWp}&mrb6IHmYRJv5LkmR4l;c9D4D2`MnugDHniyEv71 at 3aP-ZbShKG%45Py$K_*y6 zp5t4$c67{>&ipMn2GD3aTMnM(77!uXntYt$h?WaDeHtBny5ul-n1)hPtfL6h1Xx<@CyjXWI!VZDR}tM3g!oCb)i z$?o&8E6>Hb`zrkIMUA|M+Gg}q<3$rDuKkK at kK$^O`-+7Pjs)DFoMT0wGS&pV457=B z6yf0ropPs-6dwL3y=6Luhdb#l3*L_KGjOx80Z+X&>l1V_t)y2qc{vg1Qi at D6|Hhlf)K3Yg^v+7uE6soGzm)OD5Zx77R4_o3(=z9 zr%xM{vCg8q4zuh=^z*#m&2HR7ge3$=(q)o8ZgyKVB#}ZI+)RpwoWzNFJze5PixZ3d zCg1(E2Goktj6 at Fo_TSr1{%Vujt}JL%n>Wd_93rn`*2e?NI9a;2+BHhC?r{wc$cl+9yr8ljm3LbqUOX_g8yO9Mn$BEyfUjqn4- zl25bN;YSqdM2TFLZ3c+|$|UbHnTqR}Qra$F2;sVvC0Fe?i6hAD%+DM6QIt(}?ct at h zsE5K?(LLmgK{RBwd*b#5Qrcp>ufJUtcTHrDhr13(du_-d7JJytZ*Uak?R7SC)+iuB zJ%W5;3vr8lhXdxSY6e9yE@^A zSlBaP8z(^X47R1!QI2G1~UAG)%EYhO99il6`3>)hEG|=8G?rfdL1=ubw?m+%Yyd$6+F4C6S z<#l+q2v*OytOBtHW)h6m=ALr4F8@@&p8e_}n2#@qB~^>HR!8Y_?VC#mS$zHv*8}L9 z=zEp~oq2y at TSQ$C9Y1Nvk+d_y#8f>04em4!+19cbWK>?rc6QOVd4X-B??U*#1D1_? 
z;-jahn+&h1(%dpHh|=rn1;tJJ*h?Fo4p+MR*Y+<`SdIW)d$)*{GbTBo*~s{auM20i9=sUNXxf#;yxdR&(DibLgU5NoXFvEiH8$OaNG z!xPZ#tVwXJ!0Z2Yd+9?33YqMS0S1-s$NFrqC1wI zL)O_j)Wi)D16v)6r&Dj?)sCZx=uyZ9b;vARyh&yjSQ;^#cxg$Am;qlxP=aV# z!}PY$y|H&|s3P`m4$Xu2T6)Jrs?hXUR6=MHyw{w)Lb_6Jvhto)7xgMnAS}+~f;O_h z#BK@dB3I0k#;3NE9Ed&nW??NH4HyIIx^dLWreQVN!IIi5nw%u-ar<0Cy z>w`#k<>JMvhNDd*f at 5=g(ASwkgy{0 at 6OE-J^t*EuQaV^B&=+gM``Wd}-L2kXW!wBRVIaHx* zpd!^08wSp1(W#bLo$rYgdEMA{VV8*;x)jqlD|C(0-5rm94M zwapb~yn(gua~L2*w!m?beWRi_N0hP^=~az{hw_3kJ5)<1wV`Guh`NR9j9w+U0f!SG zk~*t^|405u}(DS>rfa}W)==GF!ON`#Da5GqQAQ2P>k%Yh^LRxG*6 zdb%-N%;e}e-3Sttuxmzd*ovcO6Uap~dNI(l3Fg6r{fR~g4_1#{YYjH67}nh`A2f9z z>2I7d3Lb$zIig0SguJ5$?yBr at ba0_eytkX!&_J3{TT+^&q>Bh>O*+Y^5}mk7u%S8$ zqs6OKYWSvG-DO_(&>KLRjk(GboGjEM@|l8lNfyFpUE1964$XBkr at 7bw4A~wxe7luK z=nwH8qWWGZgz#j!A;I~cbTWs???5Cxfml`cAXXJlo?SE9 zBqkO at +HZMUUX#27N0Eu|2dD(b^M&GdFCe zOH+2v6CLy<$$}dFyj{9n?STc`Me$R}tJ$}(&rf0X_@#^1pkDp4x}$-bNZ6(Ffg#d_ zlm2a#8Q99j3H;3}jSG+BrGea`n zU|M^%0jGGWN|=a)Hv4AAPJRbkWF;nXxK1J=8}(zHx+$z&rMb91xsE36K!cc}YB;s4 zMD}^7>)S#t-=0^><=is1w@~;a+*{CQo8Cz)GVQS!nNGp~t*J$(C*l7E`2Pz2vxOql z0E1~Cz7`Xv4?jmr6ClhNI%osh)HL11IzK}w$Px*9yX-@^AE>(YWPirr at Qk57bkJ2G zIVg>IOsTSa*-I;tmrA|3e32gTa&JxlPn?3-`ym?g9N%w7`|lTf*~4h8sw-w3*{d(G z!MfE83xjWn-45O>&4)HOp$6!kD`VS&@phGcb5n{;>`3(1(s$!8{tLvv^{?agBM|@V zLGh00DJw7jp??`orQiN9qp9$z(P+QJmyP|H)d~HG9x(l8cHr;)pulfgqEvM_r)FO@ z!&Eh2?cVGdGs9Gn^{VJnt*@Rgw9BI4X21N2(P0*}KBK3voj}PT1+3NGe2;-b^og&s z8}CEwJou$#y*ewrc`#;={vCIr&1~_fF~o9<5XE-2wdNB`-qD&f2~yz;Xy`>OP>XF| zby`j&sIR-y>-MneI4`@I|7xy`(jp0vFVL_?kUP#P#rANrJlCEr at 6(q+M!C{zQj*ZQ z{XoldLp%)kG|I!*c-UHgHYz6Kj>_6{=F>kC`?l$l*QKP9YWw?tZ=SP?&wp}&{SKoUGDfwZcTu9 zNC9;H0&Njp$re6nY$%C#Hs`_UW>PAS?6jA;#%r>NU3wV1x!8_0=jCe~pdWRbR$^At z)VY~Ck=e>ztFvd_7}Slkvv3ZPpW^Bbm&&c#P%Dzp8gxy52HDyfWNVGUsMG_%+lX5| zHh^1Mv&D3OLRr~7)^N~dx at 0%ZOxV?zLap_swmMsQ1S+>{E*n+1Hdb9hRj)Y2z!O^9 z=J&dn#H?_tNd+#ORICdA1h=!)F6FRON9o83H^i)@I_q-qD=Yj)?Dg#PMO?{Wn8$v2 z%(&|z{#8jna9O`UIXvbwsWrH3I>5tRiH4$)u+wELmy+a;k7brz(WKvGT|Gn{O1_td zHr>iDDd 
at LZP-WA_q>51A!B|0M>H?ddM3(8*cC at gcCTweRvy&P>$oiyz^wyp*4cJ>! zOHuoeJioz6)H3m!l(v>`Y|&*Q%}qk?cz7Z**p?6+3o$6mm8HQ3FNw0 z(@>|IJ^4cr43e6Ii|A$;UCL5+;4%zX?z_`~O#6gw4Zx>1a5i?Mz7^Z9)6F$)D_xI! ztj}O1X*|-+s!_$he3q~++0E*CPU-7VIF)uXE)|0J!Q13nW+AXJ6{mA0m<8GDg4Ymk z=hL%pyzG$ysr<E5v1TPq7f7J7PVvMTguoM-uy}me4ODu>KZv&6!Ek zSZ!89&E$#F2%TzHBg$61=%;L*0?+XMHD~f9J0-bJvcj_N_L?(yi{6?u_lmdHoOwVj ztvRz=EU7uOP#P63>Z7KG3*;UjEPZ>Him7tXDse;~wKaUPHp^Tyd8zm~hoO&J9k%yr z^rhif%??ZnP3WVKyLgYzgdb^?8bs<7$$^6v}h?VzcD^K8ZOYZrxc$3_77Z764Lt;Abe1j%xQAH^{Onr=~g$R0`64oi#-ZA{&8K19BSoj52M9o6w4EhOItpFi;`z zH#CA655Q+93hMcg z{2pNUorwXPI}~mD{R=d34A*N*`_W*x{M(Wr>@OKTbPRy?_?ZD{NP5^ngS=xtC#ofM zQ5+N8gQSKpE+IJE{9QccQy6u>)pF0jh3Ml*45FEXA)0fhpYE at cu8fs<)!-7 at rv6g; zfUf%q?wWz1;oU%d3y&O7(cdHeP5-}!>%@MT{I)?YTOJvJ>t6>AT=VIH1Fz$fl=9H^ zS$5YxI;)EI#t2(%m7KRgsc>r)~-yrbzyvxC(Aq=zYAd;xJ3_`&J-2<2!8i+ua zI7jYTAq|%!L)H$FBdNmA3TTTnTbc;0QV8Z)aP#{K97K5saiUM2e&|zNds>8 at b5f7x z?Nf=t1`h}UlO+y6UnbzkZ5(q4;iE2?9Ywx%JooB1kR;kp;}m%j;*wwD;@qGmh*HXp zD5bsnGVKFra|*yf?KP??eNh zKTDf>Vjj%>PJ_khz=``blrUlqxfw4>&|JhPqR2#Zg%}6JM<*Z%a5TDK~r?`l&ho%0|&+X>(gq^REU+uHyl`D ztD^}sY^!?=f9nt2K))-ltS7R%Vz;pEtgx*Ob)S;emeKNsM=k zu#E}<`TLXPKba4H8cpK3?qGO~u&uSejiLtR<~i$=rc?;QyL2vIjZVZ3bDH{yzC%C6 zU6-C5>1s}IQYuV_L->?z4v!kpuMxC`(5 at IWSgsJPHxDAr58_hP^sE+_)buPAZLu~OZEVx{m* zx25rOOZaBL$v;#lNmHk2mNgsiJE>>Gxm4yxUZ!~8pxoB7#?P(n|Iqd}a8Xt1*j(1I>1pde+~&?rcX+Ga#kTWa^JZK>EWEyHcLwryYfZne4PcWt+A zZOfNlRFDF#c2}*;(k&{oGmRw*0t#_|&-0vnhZ(%EyPxm>??>j|`#k47=Q-z|=bYy} z_nhbX^(kbxg7eF#g22rEPg&bXpVi)+rpr<`$<7-%8vmh&oG_?$0?>MW7RIqKR){{xgTWubj=hE*}lFE+mcS&E$_&eAp_u%fwl;^FhMd z{L at 3$D*tq at K=;!sZZ_BXL(6g}$+57=lFhTD*x%KT?8ERpn^%D+7n|hO)Id8Y at M&aH zR3?8r4ts(|Tih#O7d4B at qibeNlIJQu#MZ1a-zyg=J at s)g$oY1mTmC6py3m*?Azsi# zb8YWn6(NtqX+3S09qLxp2-xg$vX1itFIT9k(92dfRa?eGJ2g9<)Gm{hfCIV1^Cn|; zV1nW!Aepr>^Fr<$W-+_V72Yy;u6-lvy?Y$bRx>(n5>bNSlVUU$<%acVZ)wWkda4*tpR5 z7&k8b#yQBc(ecgyPnL~`2MS-W6+YIfSvGJ>Fnha{TQ(N=M_4vcl5pz%NWk?7Xidi- 
z)W!pKE4D(ET|?;~uxl)(4*SOjasxq4RU~YSb+MNo)T|r4{esKUWJ0$v*H03BpQ5Fs z$Sl;hpru0_Oq+; zK%HUqPX`%2uz>$hj2?3}qeqc$^uThJc0Y}oej4I*qeqcu^l1M#Mh~pq&_<7<|I+B; z;qw- at KN>x-lPf8HuE?U%0|6mR))C$4 at yx9LNTWyWP)3jG-hZa~ccP~1RqvHtUKNcV zDj7YN4s at 7Xb)(19fsu1Uw9%s%jULZ%oe#NGh$vT0qD&vxI&fZ~`pEfMUV>M>Uy6Z3 z?hb?&A~?p+fQ&v8Nz-N z(NLXd2ka+#{r{Eygq8+F+fNqI8^R%tr3~0lR$v6tNCxhYwx9fcRoH$)M+_qDCn%o( z-hL86I_0d+rfxr}{12GDd%()^_VD%-%=B;9^bwRt4N&$Z4C3{kX!}XC{C~Bd(Cas} zpG0u9e!$9bIDQaEpY{y0pJ)`%wV&h+P=x!qUX$frWIpUmq2xAqffT(tcJg>aDl1m~VX z_LGi5?BO9QU=IeYST!(ZK$_1rrL3|JYK>D}QA6c4D-BfAO)&69no=f(O)2Vwg717X zrR+p`GBl<96OAaRaJfe#ZMdmEL5l at Vx6p{=;;@=a_`@sFc9oaT3)@v9crA*st6&?u z_$blhLruxRu7Y3}d-Zj)tKb9KRjl47OK=u4*Ftuczv*U_x_)j~*{lN|AF!*$>UNdf zShTAwj;z&9WLKdtP&6L|#h7znU{THV#j?GKs~H0(m6vCpYf^b)z at +js&7^X7?Lp~I zZcBYCR~9%ww25#9YXC&Tg14LY(=w4Hc2ZsA!r8{Ygi=E zbb_S0=_E*|6B4a2);Y}wI74A6|X*x;ga#$2;I>8U;*vNNzVZpzQ)Z8oX z(@Z8G&TW#)0k}TQSoMNUlqll*7KO)Quk@`=hSz***vjXokm%hf$3X6CP z^$)6Q*oZQ~SMXC)L at Vyny6SbO7ryYi)cW9<&aIh81cjtLD0MW|U#YiiQ;{SJL}w5n03sme4(9SSd2g=Ax)dWNC(# z*dooaLdh|BJo=ZTrd#UZbID~<8PA@;sb-``U);;hT+s%W at io(7HM!HIe;+|33y)K; zwAaU?q2)?=3BjjLx^AY1)`iD?6kMoJv(?1rJgR$s2;Fb2Uo^df?c74P657x at hmCm@ zrx+RwcwUJ}#)dMRV@;uqs1c_2l?aw3H&<;H})Xau(9W%>0x7!<~vYn(AN2`jdY^1 z-z>*>)zkt>vz+Mdx5%kod9%#Y7>|W5g`^&{(!4Al&TE_mk8xIQOxnO=Cn4hn&Xr7g6Ghr;)a`vx!e#Dt#hEt+00wVw zzdW98s^@3?>+2)@00faq-;p(Taf0RwV4ari_HZ(`bRBetgGv*gdDgrabZ{n=IAIsC z;cYcb&2IMHZe#;P2*h#E1`ap-sGsk{3JGk~K*145za84ftSaEO37B=W!iMmMEN$1_ zX0LR?X1g>_U1etvgOahK%F#_hCn_**fZbnY)6b&qXd4F%pLVHgl&f%hPg%I8{hEw8 z^qUF4IcuHdD*xqJG&{)?*?K=7`DS`55Qjg6waI0u32%m8le^=9kY_BaKV00*1nwLBv2HgRN=`+ zmts|?#3?7$WD}mo+=mBfmqKdsq~ypZ)`VIYe&tBEVJCZrnuIdrKAJ1AQ3{>HBNyYh zC12jxxYoo>I6~YB;#{AoY)z-cHD?mgC7CZ3o|IT?i+4 at 7NlLDFO{tsTSyyt}yENP6 zvEcsY&>Ue`ql>)|$H%Vgl>nBR at t(EBnbnX7!__03(7S>q;F|?-w~KxCSK3WRO8gU` zX3SKHkUO zSX);a@~HT*Qi=(hjyt2o7EiFzM;=53u(wzn{XXnFIa at KK@Msq8nINh$gjMvt-M)$QKIRv$FPi`=O;9LDlyXGZz0= zte-!CC2I+9Q3=NOVBoPTks1ls)L0G?wP<`wmw84kaQO+(eifBCmFsZC2XdnnI&-Kz 
z;o&oQCdZD;t&#Pkd78R;!W3nsf0mFPJORBaYae#aZ0wpF>6;n$8IwQ2NpGeN`uNs! zN}zB3fIa!WtgZ~|2mxO){E8(`(D+Bs%=bToFRkWmf}F_tXf}<=E2?wm6>aEON1IG5 zKT_IxmW+06 at 5%ff62$m5&Lo8s)0v5Nu~~Rjq;Js~G$4r3-ycS=&cR7x)-8hX at 6k-- z;VKp+DFB%9Gl2ftiOqpPeMBZ6v@ zd06}mR4+}6sP0gWW>=tgDrPon0Lfw9iV4Nf*1s2W at L7|0ZGbFNsG=EKrL*~VCaJe_^>Fy>Fi{3jmP{R at R03NeyFLJ`(R zzMo3Ia-G$iamGLLTEvY1D$mP)`*7rp|K#8q|4}X3if}S^@2dZ7#_!!eXvWu$54hRA z+>nMd|M~3v0m>7#`WR>c%U;tpfSEf6-1p4qlQ(F^f#Tgo-ZNuYTfGfw!nTCK1G@{n zDn2%%hku*cD1EvlqjB-M-s^g=qek%^kuPSCZV1o3emUBA9Tcjr53R=!T)r+jo!vH& zbTK523Wdtk-h$Ck)+Cwa#4E1{Y9G8i$HC& zi9OQC89hqN5{)qTZWlh`p4E^OgZ{kbOYwW+`Rs$5FgN=pRV4;0o6xsmTV?P7YR;PX zJN%BJdB5Y$0B~%D)7Ro6q0p|Bv at 3os2v`0jJe?TDK!h_rJ2}O!sL)7NqZUl8|WT9>Q=YkGKfourceJ<#wNKiFyiAU(K4WTtr+3g7W$+ at 6g zqd at 3Sh|NA1G$pEpIKBRcP?T1wCJbHXA~>L_$4j2zusRP<MBxpcD@$0WYWyVkx$vY;F5yuqoiA2yZV6O#U;RNwz`m{ibhH}190G@&Ck zkVnws{gs{m${v4ZdkD>sY0}yH*xcKsD{@y#7v-*&&S$T#4a>)t737z{2qx?|&7ib| z61^pT=1}~b at n{1o>9b_*%P*>rEvFso=o;vO^>5&z8s9#lHU$qB?`oi9TJL3P z++V+!B}%sxOEbMJMY__<5~K^g%*OXkkxyuh$=XubL)N_aoWUlzCVn2ok3L;Zd3>qu zeJ?NgXG+#i2Zx^`BcLhJD~Kctnj3R(Z_qioftfYEh at E-iOPcxYNj(>xjJkC?MPtwoUeT#6hpUXe zLg1p%FEHeGTj+LAHd{_nLPyYXHrp#=lQfq+_e7t6;~snnCXdE5aN?ARGjO5t;e@}{ z56UUITk!1I5{K9;joEOggPcVyvH6$S{S*C5QoT1j!xMNScHS2D!b>=RQ?sOF5a6cw z-^U2sEa6&-TNdd1}>Ffhns3i(YjqiSV+#586!f`xM_ at nQ3Q#fZp*s9R`Mn*We+v zW!YooR at eJ>$`bUqivD-=ZRovW2kl+Vg7WN2u{~=q+jNBrBTsdTy4;~IcdE;i!hU2a z4OGK#*@2#Vo8|)J2eZ1eRQ13 at at!&c za#Cr8mQlChQWdcK0I#aYcqBiM>6Yp)XqHk|x8GzUCyw)#d(!Q3i%Z zZFEe-h$Q8nxQeWXz}mKsKQ94AKu%^auL*blKi at -U3z*I+@^V`lD5m~G27=%(Y(2U+e&gaQOca!!8y0O@^+UiFajH%QHDO%0(Rmsc+Lkh3Fzc5Mckfw at F z4_yQ=XALN!t#v7(QTS%utN{HRlJzkL2CJA)g1T0-DQ#Swp6S+DKSbUzQjq7_ z>`F-J&Av#kU>D2eJJO}3lxqq9!V@;nnyyj}$3p9=pxK|1c%Dh at goJKfL#faV9DGf1hOP)=qAQn-O<0AIng z*!R&^E7YDa;qh3(_YGbxQX)Tq8azxZP&3@}v#;@GZ0=Kd$~i_s7ij9L6m^xojG4(A z&R*tIJ%x!9Y8A|jwmu9a&{gDkSS~FizhKn!Gt1a}{JvjGFE3-e-~raIW!mDEy$sLZ z)`=Ck5iqV|iFU}fKF^l^aPGr$+=jUhQ<}-NVO?e at 
6i9Jk-AA-adb${?R{H$)Tw!aOu?0{FN)>t9}&J0BlRABY}>MiB@ zTFmj#cLdv*82zUrnU{zsyN)W z!K{(TDu4-$F0P;K9r4;N#a*(5Jc zTXpKAVV;`UWVARmENkMXhlZtC<6^AwU!R9(m_}i+Mw3au4LU9^5c;h`?J+V)FA+Z% zYFlVCW3o_7-#&ZC2(?@3CRV6 at iEacQpVQk5UB=sMlj6O-iGmNKiL$H>s>&_Szk_ZfJ`2&r#p&HB0HU4spJVKFRh z4&EDP=bt}@h0W``un{*>OO7E~+(?asaSk`Mi}mjYa#uH3xLU$Vbt_OeHL zx0kO_Mr?+T=Px;`jCcudtn`>ua&+xjrQ|4xs8YVjKWmg-Ej$|f7yHvgkwZeVMq{G7 zdYN0D-&zqG3yrC)Jq&|_Ny7K^F-l1j;iB0Zh2C*FPq9kje*f%cSqdOVdiw~~MUPeH=CEc!YwUzoF#LR7}NS=}W5l{@TAsN({1CNx(qbwg>v1l at ho=z%=*CQ*+L zy;qL^*yT&mis7)x#mdy_$&K>&vvmxcz6R2!2KD$f!Q{1k8P#wW}p5EW2wS( zg?RY>;vM8`_wfA|rODbPHmwv~7-Z;!-*V^3@|eI(CqSnYbf*TDe$=R1T!AS&v at w(l zm+i_ld{RHANLOX*$IlZpg?8TGXnj%Uv3b(iWkg45FN?@eiJ|wzcv2QDZt`9NjlLLN zxzynpn(;LBfE(ti6!(&1di5LoZJGTvYDtsl>B5a^x2q~SIqp7OK at q~rb}ZjrVA_R0 zJ8Pl3UTVYL at r`pZr{F)!jpTHJy8Ko!U|4tu63`lJwM4^ zCTw33n{}`}Fw33-0x=#|BucSEJhL)hSzxz*BKEFKDDXB+77wjt)b`QwQMl>1zj`yY zE%%MQ5v?CC?{WKpL*$;UcU)oD$9~ct at vhor$}_RcX)T3;J>~&lS0c>CF)qT`=>vXV zdLx_Iz2Dnv*4|TOOcfqPL at 7M3*s~69$cZtT8mQ(Gnkt6sJSRigjmw5?iaBz*ca763 zrz?3$)6!yoIaW%Zb|GFyPP+&%annqBPnqQyR at aI2(DqV&{Ku%i>?bGGO-f@~j#C-| zYEj at 790@^NJh7n>?@(K`0nUP{XidZFSL=$}HK=6GU8KynD~;tkf!B#Zi%wu5KRL*A z*?k9T7YWAY at t%#59T*V-^w?k<0bs(#r4J}=Ze{UO|JE!s9qGh-vAhd4UoFa)1J_u>+$)5X2-SULCMPHr`Lxi=XX1Yn< zs}ccr0Co9Ecx!@Lqa~&Xs!SnK8Y&jTIHJX_`!l85OsV#&p*G!9|C$AQm~NaSc65c~ z*p at 0KPIp#AnLkcBrObDdZw at 02Ehy1k};BeJ1(E zn%*=irlvPu{yk8#H5aH94;X4K)CPeKe36nZH(rh{3U$m4eMN}Ut&8Xk*a$S5LNzD|V&8$FZ=ZDD4OF;gQ2|tF zpL`PQ0xwjWPdwUzg*mzL(rlR1rwO|nT#)!;rf%Yu98n(&)#H^@;|6LP8o`cgNvwbX z)O<%#und)&gHl|x#e^+}8Jxk9Dur!Yyb#lZzNnhRK@%Jl|0idH*42Lunmdp9`y9;i zW$wBOFIZ6zeG+tnjVJ;BsXKU(#W<*2c8YBE%Y7KVu&~GFb_dpWGIWNDU!LD~Oj9vn z;*Cm4r}vC{e}U4|_>CoC6U4 at q*Sgpp7yz`rLhO(()3RIp%gd0!z8`b~#m_&DK^&(( z*M*~G$g0PL46_b%cN at kFIGoibenuM-R#)X at NluK&nUg8^XxH{~Wg2eVC at U+=Xev9< zljm}mmH7)hK}#rvSyjq*u|tp1T;2yym!rPe1G+{hq*6ZyL2;nov2{H6*ydJ~8I}pX zGhVw7YZltqK%Be0cl1z4bC>U)do1rwP}EgbY>`Iznxy#d{m<7zlF%JxSZ_6X4eHZ! 
zUzXeh6FAa?#jE2QemzRhFb6`qRw}nMtFS$*UF??QcHIn=Lrz}@h94O*YPQML*y>Dd z_furA?nC~mSPB_cy^}s1KhvbyS3C*)i}@|%BEP<))mcL<8y9I+t_(Kox?g4RfPSl| z{p0_V>FZn~uM@&KFj?}BR+%J&E+;#4D#@HlY612p^o7~GdNljZQhmAdt9cYM@?V(6 zR+kbwsLXeYAJFY5-nB`pvR59hv}c^EPpET at DY*VU>7twEj?e at OE15L1ZdCsDA4v9_ zr5Lp&N&X&*Sn9@#y|RLNWdY>KHhTdFeL%c+$OxLLTNS zmg~*(RS6B!CD-3(mM0~&NfR2G^ZI;?JQjeXuD{JHJH3rI?;cx1n|x4#dA{lK at eK!B zy$$B3?mhGcC#Bxv-D3ta-*_9%km+dm9yCu$FH|XjK>GgxXfx7(2k7tgHd;b;yjM-b zq1dDam8G73J1qL- at jPs>V zu{F8b)8tX*^qsNtR_$A*BNtFF4*4)VTk29UTmj2cRUUp-`cXN1HlV6`75-R^ahfI;I#$^|7cHR9 zP_9{r|D@>i6xOzl#~)3;MW15IFdA-`h{pG2VOD<3S-BZmxei3x;?Klr$NzGp- at jM; zX^AI(Ce3lP3!wbKax<>sR=t1$SF%*@0rPy8yg0m6O<0UeRr)H1%eM2wERBE?K$_3%ouuHyr=!MYjO z2hxY0I9BtRX8J~(jdMk#phHySI3L*$q8^HV}Nj6HNh)<7my%5xL at oi$P7hN z&n#ny(c at g6AKtiAx=<{1u8MCdY$FeoW6HRJo_*z~HiM#5{FUJne9I(;0mlEez(>tqdCn at Vs|alD|4rdS4Q$*rxZH)yN~dl`w{lQS16R^ zVdw!Lxml_b?g7#txcMGB0rh~_g8)h&Ev0Z^;g2e`KVuh&0o1AXK|8HJJyP!ZEQIy za+*unUX)pFbHXYqQ1}|?au}Y4wg(8lcP$hmd88lrMDE6Y1m*t157|t7Hp6Wj%hqpO z*_Cul-=h76huH;Gt8{&3eR4k%AZ2By4^rI2FrnE9&MC3 z>Mv|S9d}hMzQ8n}wn;`QX;li3V(Q)K(6f9)BQ8Qp%k#8+tCHMo48;;p$1IOg^rpi^ z9TS$jAU7qpXo-WiaN-|R;x=V%YoHE~BxN1+uHA30@{dyHANH5L9e4~)Q*}$$*WJ3l zZmhrLtw22j{3Uzq#;%)>RdxkRPWVgO>PNx!G)vfa1JwGUzob=scm3qRqs7Qy$f}gQ zuNJnxf%_PMGE4*hlJ}L8Hep+QPD$&!FW{D7ZP^v7ysaA92-G37rJz^E!K+;#_!%`* zIV+1pW$G|M>JS16W+CK}2HI$l4zfK56pQ2hFh`#92)dlU5(&E4+vj0I+$pphtB}#+ zVy_TrzxMPzMy}q5l-O_2BaH#N1a}wWrvjA12LB^_>E>-ekyZ-d@)Jp=aIc>Tp)vH4 zm*_+CQC4~$3TRX4K2`pZ%|h~W8_T6 at E1N>s!|aOla90bIHZ$}duLw>$S}pv~BEbJF z7S=*#?|8WY?y$_fSMd%DZFmjuuvEUf37hF|N<=dqd#)PlxmU@*M~vD0T>+j6OkNs- zCMsPHD)NLXJRpym!>SFmU2J9W9h~4y997xG77j<`@E!OK&DO-G4wve0l*-=3gyB*> zdIx?iazuxldY^WJ+?4K2+zY=BIUb&E-Cyi+n%v!AD2EZ-y(~S+%J06gey|Wwx{XM4 zfghWPVeM~#3T8AGS4Qi%{J6^e6dpeGgLM;eSJJyTWT!~>JeWbcy!%vT4`dVftTpHP z3t{l@$*D}26S?!-Je$8TU70$W%pWoQ6d0*-hlTAO{vkk4R}bNLh;~rg&|+~5 zfY;6Rv}PS#(B19rv8=oTt*tjgQa6H;4D at 59>;jdHOEov7 at 9!rfx} z*T3-VlO2@)kkwz=nN!&*y#$&@C;ym?`#saqXe!jUVxOG)A!yoeH%~y{nGfMg0fxdx 
zK);Sf`bwOzaw$wh;7caHt?sesIpe938G~q$P}cT{z3U1hn7Da(H?-FayQnD zHmySq%GruEpbGg|bsDX*Dgt-3p;4=(FYE7Yu00}}9&R7Sy|{PbCUki+tU!>GMt9%~ zyo-HJAL-MLcIK9rM$dHR)C*E0dqL_zFG#~)kVL(}InWDIu@^uWKrL|ny4($FyQ;2V zJtgwpZ%8@!-LD^cPV=G3{EdKZhRb7ev%kd9OkS=T_^}71QZtT%+PP3+RFn79XOtc{ z6DR78bv)cy7seShR&D||RysA-pk at luZiiH(X2PYcuzf*6nQ~ItUOdZR(g&(N#Vb4< zXQGuP%-Wrx1h_m`y90%t9y5&2$55LN6xm&#YjoHafF<@|96V2!4trXIIbEJ9I;`#( zvf at H;lFKty2d~hQrno%U>aa?Hm3gjn1DpeH(h{Y)JeTW%XWEY>9GIR zV7V?&p$>aufMlM_Q>23*)RN9}d1mXdJ2hCb%Tuhw+#2jAm*)l at He-OK%jLOI2T#_L z&U1O@=&&RWw!r1NNr#y<*dmvwM28)33zJ;x^32u2AGPtM%UqtDby%YYt9E&^b=Y4t z*gY;!jt+ZjfaG$Q=XxFdu$EMEc|;v{uLfJ=^5p8UTQ%52F3&U_Rum at r2HLZl5K+!_MUhqPgoz&GjO6RjMa zMBow~e7H4CK7}xIb<8d;Ln?u9cJo85?2j5ajlkJ@>c=&3I)QU^@B;(nClls+9kWEs za0P)y9bBw|GYOolgRj=WQwTgw2ag*dpKTr(D*$8CGUS>E#tHxjKj+{)^T1dE;DZ`? zmU&>T0PvfihshV42gVA(Y}7K`WF8nR09>zuUFLzY0>Jlc;CW^~R(LE60RxNdTL%hQ zV2&6q05wC)v&bAVSOEOe&oKhpQh=7}BLz*MfN~JE+{BgvlQG|>DAeALhgw(J*c?zz z at c8&lx;mIhS0~G&YZA+#YYMv0CPKI2M8dM^7AFsOCEenL z!7id(oFUlxbc?eBbI>i$1k6mgIQp|LE8gO8&q8#I13YV`TO88aM|6uLGkez>UfAxY zTO5JeYjlgFE_;z~ahPS#S~Zq&bY+hd0!LKlqgxzB*-z;fhfQ`L-QrNmmeDN^kF0`j zaY$ry>Gn9)pKd>-`qOPY)t_!Vss41^L-nWIKB_<6;=sksbc=%(>$2c2j!rCO(OAYo ziT#TZI1sUebc=%zYoS{lZP;6Miz5tsg>G?FVgF0FIFhj6(Jc-g>~Xrq;ez?-7KaG- zQ at X`rf!#;9I1aF7bc?djD(DvFoz10NlyNqbZc(UNw$3ujGP|4*D8FnX-J;C0B)UZr zW$|>2Qpx(wc#ERQI?S|3SpWV at agx02(oE%VR>N9aDyUwD`wq=aSjE4u`c!TMy+LVK z{>g6X8Mv=G zm_155-(6O3&dq$tnVYlTaYbgGIX`=p{N3V(O}V1nsWjFvFxREzWjp1r&^HldgjAu# ziZO>z0Jm{4!y*Stkxh&_szF9^h+T|n)gY>gAemx}y`4k8Bgm>L;#3Dg%GeQvM4m;} zT~NsF{r&<9oOLO()4#UQzqaR1EIxP#HkAt+>r07moDx6BxWsuu3LZ&f)cOO>Z61qi>cQX$b7&-u at __Kk$Q2U9AjF+X7_7Sc{9+%Zsq`g$WanBTe)Uy^^OZDT84?Hloh_-AHr#MgjL zJ97%G3MsmCp5F{>Oy54q0ymFfKT at V~6P;vrsUIt(XJWup7k!>gQMVz0jaK{h5zG#^ z&|rmqexALs$4{C3&%UL1#C`vljwsZAYoaamVoaOX(|*<8k3G#P#xM=y<&Y#X=C}q~ z!67MPOuGhI!Xc?*Os57Z;gB>jrbmN_9Fi`^^l6aGI3!z)acaH#d=AMKW0EvT42R^2 zF)13P;|GGw5 at S*|$QK+^EXJg1kbe?n)lK5mbWJ*TBE(2K;FW0tl7a$~8Z99EEt-Hd zu80;8!AgDojumQzL at 
dBULaitcDMIZOZp$xN5f+D0ZZ`FYev*YNDd|Cyg6zOdSnDV2 z9em3nE(oyJPtsveAL5-J7W!j!Se*ugh5q?EOga=61Xx_B=-^5%DJ=J0I_w4w25bFN z9X3^i!CHSD!C>Wc;Q-0YT%NII9ut1XW(iu-%Uzy%deW|sI4r~EnXkjX)L>V*JY_oU zA0LHDUg`3b>)`EL(yLsaTXfj-8tg|dk6VX5ropoHMgviQaDZg4-fjS1rX|hO+YMkY z4K_<}H-P18uwuP|05*AmFwBr7PdO_;u`Ny)b4Ju;gCDWra!{Hp6G^5|C~(C?Q0$mL@*pd zZ-s7d#wckqc;FPqBGhcemaw&*k3;~tc;F*KiZ%rK&f;i91Jh5>hDW0vLykr)qZH~~ ze2+Lnvv7|f3wJSExc at X)RX0usfo$5CiN8#B4Py^~Xf7J)QBiBscg-m!_OfyqP_NjM(^8ci^KjE+*wERaw;7U6+tZ;r0Ghb!`k#z#va-kexN^)+pb=x z6gt&RcF!(t&uoHH;`FxI{5he-2V&>?Wk<*wSZo$69qV>2S)8 at c*C0IExKL at VY}zo_ zjtZih{61Pci&FhXDVp&X^bHrb!<_2pcH)8NZ$Hy^-dW{`FRxGhg=9d=3*}>CZQZf|Ev>mOC}$U_+^^th at 6R!z z&A$sJ!@rAWYs%ZmEVOG?cb at V}d%wC*KWJ21rnK{>RGKHLFr!q%D at +@K_Hb8ZqlPXM z`f&|QA0 at 7~EbS?X} zmPdAAZ^h8->rz6ubQRex^39-xK*}pzTGDsD`lb2gvc8h;qD$B$us$!8tz7R8} zDrNSrJ!Uznoc-np{u<)F at 0$H(c1WwtweP?%L>KQB_X at SY|9313wJGo>wX#pVL3=Kc z(M(_zzsQ0zp0n5~C5*rog1^WfD$ZH#ka8jM(;khdAC-B`p$U*mo`Sd^X~h2UhTdDs z+)4|mGn()TNEuH`Eym4v>r$YE$gV=4F at 4`h^c90H-7qKOeeURs=+*|c)cbVog+=zf z&?1dJtIl3RPLWf-8tX98yUuBnGrj8^Ch2l5+0oavWMj)*P>UHqXthXyS7QV2vBje+ zfPMX)?>PV1U!=Rp88C&7^_cUj8z0SwhD=hE-v+jz3)o~EcC#iZ?1sD0j9{bG<3Woq zak7UEqm_^-g>d-=bvO95+zR zm(^jKc|giJKdD6_c05gUp`E(*%`^y;BMyI at w;?aE#OAk3 z$@mr4>Gq7N-v3sCiJjn{%#}Fropu~v7b3b$vEtDWyp317*=h9J83#9R+ylSfZW4TG zK`Hm-RquTRH}C4+(4{J6yzdBP6nVzCFyk$7GY&Gsnir9oh|F-a_B##U!EF5<%!Ue% zMV3ONTV_X2CbLugvr~zO+7Q-NwkyqER at 432_^ylF*>BEJI0e3(cn|O!`q>ZYw0}3< z2kHJ*6K)!5hC!`I+fah11UzPSb&8tYpdD19FRX=Fr_oi)%_&uWJ6uw${Kb|k|8kpB z>4d6OD^sx_zn)~r0}ggMt@;I+QapLE$h<>ukqP=v=9e?zJ?6QqQ39|e{4RKfCl9s3 zY74>mx55-pg2(9%)u6KuR#`(E9<^OlwA6UTqS#nS^ce8I945&OA>+u%jHunjLe4vf%IV at cOU>t^ zrUJ?WD2s7EM;r|-6d2{+#?in=ffdk+I$AQJvPP#FU;;r8 at cA~ez$UFGJ_>9Gr0(XV zVgMaufWC>NV*wp&fWDHW;{Y8uBqMfW#4Z-tbw=z4q`r(LQt^O}H$XRWbON9g4A3ud zbRwV=hhzl9c`Er1vB04-;xHgp!AXq(^aum=^&CAC&?61d<2hOYv at j$iUFfGg-zgS2 zbw->9r1r-UDd at ZfqYTimaP)bAKFH_{hZWyfd0+^J&&Wm2lV$w at 1LVj0Q$s`j5vvrcCnycXQbVL 
z)VF3LbrR4g4bbm#^eI4}GC;q?(H(&97?P2nq18FRQ!MDz8R;}2bsHyj8qlW=&;=ab z1?VmV^u-+A4e0J686hwA`8{GmkIqPs0V(_{1fCt&ucf5_b$kfv-OABJN$;;YT9;m9 zMph6bC&YpiIwL0xNX_A-hLYYZIC?1Q9l_DM^cpk5(43jyAr^G#jC2@~+VcaE>KsCP zpXcbIq_>u%b?G%`WC=0SEf#d^jC322%HgDXhLGNI96glu_Vy84mtJE=4&kCZzgH~i z)fwqEAhm^)>eD26F!~o9txNG>^h%D_CE1vf5=x}Yv`{+ at o+IQscqK9VrC9K#&ghp0 zMaA)=zS6{fFq-ueS{M1j=zScmi at h zZcggN5aNH+*1%Ck}8Kg9Zn7&#>toYEOFn#sRBL!>%}5dWq#!;1e4XQIS^i1iQJ z6!N>of-apAqnW&dljFy|zH)d{sH at 4E(V!_vXD;eqTf8wNu(%qHrVI}^a?kI^jW at I)o5)=!9IwMB2|ApOA zv;SbUrF&S3|EeoW;*A;E)rE|FCl-9CGh#IR|F$b?_8*LXpleu(zqKn$;*A-ZN{pNo z3r^~c7|s4dSJdo37~Ot)ScyMyI!fY=8F}e6GIClhIIS~cH2XhtI%@VGjK1^quo7Q< zI!fY=8M&AkIU^RF(HSwC{eS3;n*9f(KkFP;;&*pONxU&5|I<0_wEtjdJU|37$XLlyB;JD&O>*XYx&}H|Cpegy;L5(ez}leo*GRaN$?E0C+*hY zHE6)d6t9*fVOyrKZEQvdw80UqU{gREufmNlgUl1l&BC?{vjN({(H1~k4A9?^HDI|F z&{jYT5omY^U6|xu97C7e#1%F)fiAZ}Qkwy#=Q*VqK*t!MYdJa=(6I*SJ2*NH&~ZcX zQ9yjy#T9nq!wyO929(BgO7Vb>H$Zcn#qtC|Cm5i)&0={Xpc9AS;|;U~EO&@29K?qM zk~$11{gP7}0q7A1Xo;go0(ztYdOk-BfEI?};~L__DXwrL9~DkW?lhp3$SIA2B%=(_ z$7!j*{5(LPXMp}YM~?>d=pp#{6B;CzCy6VPs6CP(X_5gYFQ=3Y=wt)*GL9Yt=rIQ9 zLXJKk(B}`q$3?_Pint<$_(*}IDF&2!$)vD+ETE0V=_p5!1N1lp^iGZ*59skj at PRf9 zWqGQ&B31H0(o{&AIw+~HONMcE`2;{sI2W}DP!|B|f^$*%fSL%XiH2p%7g_gM-_1CS z9wwX7zSv^@C)pQY;Pyq!P8f}vgTF;VaB27~(s@|Y$50Zi zQcAx*7xfmPKtm}#b}niopg=n*{bmqKuel1AoLNf^_Rn3V~f5g$4`;-CN$8X zaCM5MohUS=pr0)7G@#T>c7WxuA}Kv>fc_&#gGy4`Wq_{ZXi!N?yNBT84&tLnEbY;o zqsM^I3{D8vBc*2y&}keE3Q6f%1GJf=K_Mya9fFO|&;;P>6HEJ~pKvz%B2&%-LI>q* zfe2_Mr9YgDf^lIvXd|Wlk;M~AvMGB)fX-9#ApDRf#f8D&VI+CiS7PZ`dP{s|P?xD> zg;?IE3GZO^7>?Ejcrdz~*8YYBbI)nv+<1qp_e%!HiR%g);p{)FYk>K zW+OIAh>eg~8q(Pa84$XZ6FNSGFxz{F73SdCC}B2YqZwEIt`lPE37w4-285pDgxZG? 
z<_%|u73LqGjS^-fHf9hT9b##R&PIm;p|PA$C(5${`s|ruW%6bd2Um6s2J1^*~Aq0L_&#(eNsV7R{ zjo7rHoyK)cEIp>Pam;|wQCiuDC-K4Po!!F<{IlIr0&m2|gJ^$neJhrJtFvJ=kfQ{nxHx1^)4_D1kR(<0r(% zDY5jF&W6!MKD#SwA|H&N+%>Gg$9F{uyb&A6aKZ2D5=*;CqPuVaFrc)Xj0Zys{Bs=L zgFaXFH<=-iVElJJFKp>aP+@ z`-R#nk}bG{VIbp_Q0qkbj-1rb)QDd|`Y#O1|0ys3%OS>rg`LBW z1G$}1ua&}YrPSTbbyYIC>>xh`ooT4$APUKQR9FS8&7l~8$q!&sIy_D z1KiUwtU_PXG3+>SRY%k~V8liuvGJW)`kl^(kq+?nsbLlR=2OFt1AjafH4YfDQHycp zT_?rTlhip*;<#Wysq)mY8vXQB!wv)!Pely`Mtt;POn295vGlaw97amOp_9WZ^xIEH zDfENUzdac>@f)!r6B}p5(la_6MmoUFCx=z&GfxgX4vab(H4YfD at qK&bSRm9UpCj6} z7M#8J1aC*7DzPiFL)c~(wvEZyL*)(ZB@}#LkZm9zuw`Zg^d6430NP at Jex9SP!nV0q z)FLpE&>&$WbJ7 at kHNr!m(|t|IYdO^JhV~> z@=Z6xb2dB|!*kaw`KG;F^G&b3o^NXWOTOtoc+Q9C%i;NYcxF(>8&JmcJMv9UFXfwV zgXb)G9uLpa{-6G{^CyZ*2TvUO!IP7LA~oXViT8MVWN>o3+h5pWZ@%gpz^L~2Xp+7(00C#v4w|d7;qKL4!0Jaf-loG}r(uJRRl34=M4F7uNMi#fM at u~zceK=FcG0Q1 at r&4_o<|G$DZ5T5Kk+`l6 at 6y483%;z<|@A}!P{u5 z^3i#v?_n9O%6lLKyFPK?#L}O%n6-AdQkhi7!%Q8Nof!R0_sfcYg!KbcYJ$$p5B>e* zmlbjlAC}Pm2(B!l50yRMUR6#}PGy|r%#75SxfGZQU8KMeFKlKB2|3ACYn6Z2jr|LP%T$crY+=h2!Zitcbf2{6wBU_lri=T- zEm(+g7h=(A%tNad&#!9%oKE(D)7$_z5mVS`Tw{Oh_-ZPeU4(}POoV4*n z@^eiW{|NrT4#p3}Kl!0w9f(MUjVaM2boRqewlY_HZGxs_zk7K_WAnDm6MW1>@!HT))h~BLdla at Sc}{W~ zJKvshNZ4+>#@my>a)h_%*41_lT)cg6$fE2Ow#Onk5y7zpZtyl+=12+HUNKeTUg;fS zJ2Ycs#-7l1?lRm9PaEf5tm_tvW(XyuvfXwbmYDMs4o-Uh`U-2pquC=erP)BRf|mu^R`H&hDdLy9Bxjc1!4!(NzVpRG<>( zbE?{CQA?ay*@IZw{ZbP&i?Dr9Mq_AnWMH=|$i0^{>|7x0NKU1lj$q}Np^shm-qqaS zsHKeA=3=B}^ds6UO$1z7Im`Va?Clns#KB#nUfSEQYNDvQQ*&z`fXT6|5?d}dNiJm? 
zI#6 at FJ*vy?<}R(9qcWGm+Z*uIaY#XL_4~u#>c^pvvS*-qtGqqjU;Wpama#Fen%w|?0n$QJ83`pq`%n6b3K;JPVUB3Th8%Zzw1mtx)s*liMMC% z6Hns1TijHYGg5OWZr9w2+vRq5Msr|+rQFS)%OXz^FkFl%)7*l$ELpg?3D5W(eHD-L zIzq>E!LNcjIApI9-<8e-LOATenAu*%gEK@$0w)PQ`6hJO5@*&C at jZETWbAJkDKIWC zcCSlVbxgR2-vO04Cq_8M*37G7aB0`lyc+o25IH37Q}HGLV+CGW7%Q0Dc}G zVNbb6mzHK}Ciejk@^T%ff9uC zC?A+`mX#|f;iX84_&P3!!YY)*T(JaUa5D0H}}ZmTr-Lj|geI&KwzE at zqf=)@L(Yh| zK^YaEZ0P=U{K1P4_>=Jwlokq9u-BZmFDHg4XjB at b99=lFj^G{*u|pmu_O2qY=S`9Y z-9CpN2>tYTqQQH zx>oFv1R$SS?uOp{H|j&|F%!Pb%kgE_n$WLq1i_Y0B?z-)LmZYF$YSLO5OfS2aI~$Ybs<+g7H)Ek(OCv z7vGb1`pu!&!|}xgA4VGzJA~T%@!Bgqf^K54DmT7DRS7QC_0vJ|Pk-4T6$|z_qP!)2 zz|3*hv9(_f6duQyuE1hq3;7K!>?3FW3;Q*%-*OB_p}?>hHEVlFf9i~MzKXOc?lH^w zqpG^lMQLH5i={?or*Udu)BEq2VdU$?26-|%4TgJ1*1_(Rnxo*8O1sk3y$s-cm1ewJ z4}@mJ{UU(OOLt(J1Hl7F4t#syaN<6>v%5oSN9V$jVph7d`%8??ZTWwz{h>&(w z{M-ycjGvdDlywB|>~QDk{#Cml&;=jTa&1a zsY0PvHDu7Jv17IzxqPETH;_^ak81N@(91{ztM+6 zRHxdNPuRtf$jSNC-FYNJv;|)IKIXb8S0K!U*z=%gCxMh zB at o#114f_QjK9wu#$Vra^xICq at 6#`%-~IIa0{!lVU-lL#9WZlbFc%c8jfEvA`_-DA zZvh$u<@k1>rV0HXRk!#=3Q(ITFD_%(-T)2x2F9AjD7E7;Jna}5h`*Sqv^Vz0G=33l zb!^4a81&?KyYkc5%fG{PV=-NlK44?aT3bL%wWljDy+-h1#A6K9Ak?C*r2c&W)}vIv zR{CwA-g>AL=Y=C?0jm);#??bTIEYxB+Xir>)*eLvLZ#?e#@Etsy57Ur0fUO8k z_hdvfz8}%B?cs(EJ_Z?~WG_pTioMJ#P047M(u<3wBzS;20ycgtCJ$D at tv7(Tw&1Pd zdvkCu+;5lzvFd*Q2P$LRNjPr11|V(O#vTDoJ-iY$Hne{fRw3vA9ltH=^`b7 zau}d)$nNch4jiX-&DsW`_E%^IjZAS9qzK*~$NNgpGT|{g`&PRegCO*{g~vM3o>_}Q z2D~g^azY2HfIoY2vEchz3^d at Lv=-`(I8kwd;Ek2%-AV5>u(J~`JJ2VAe`gzY&!LdJ%krBd(Q?87N}&$nJ)_DouVXVq*Wn4BfQ;xw zShi5R2_2yykrOwjqfav*0u}mgA*sPy1n%FD*O3%gDJ0H&6bp32yBpeLddZFp3b_m<#cxZju-t}v_#pf(l3MoiMP zTzHH&57r(=V@}T!R{&_<$(gro9TN zcxb+o6=s;Zx{zz3_M)?@%Cn?Njw;26-41P8c7E%R9cYyx9wyfM4{w$P)LIG9-H1Xa z)IJLdt)D3~dlYwH at LhP2N7bCYQ@)+|C2HI99J*f@%LjxF)DzJ#ziyhcl_1TP=;@R_ zkq+WPowma7dx^KQlQ)uoD|S<*CzQYmV|K;L?X=z|yPtMLDKq>0zDH4*xIn5~Q4~Ou zR4Rry1VEprB$%VzrA{rzxR;02{mL`#WJ5{a at D+}*(gaOpU^UIjm8(pxd^Zl*d?^Add+3Tdv%IjxTb 
z_6lh-zq82`Lt{BLP)RfAus1s+=^a`Bm3JqNVhZsj%>y1>3bm}TJ=W!kMfcGG-{V-! z9B;FQ&FavGTRBY$AQ*-ebSOB%BA+Lsw~h zN|O|7i3o1f=CBdxa*N}LNgAz^j>s}!0?m38!73D*hU??%Xs)hpkOYN#d{$Te{@`2G zjx at _lBlu06;QJcr8>qs3=nQ|8nZ1dBlX-Q2zG=%p@=agD-}Haxo9f~3*YI}~{ze|q z&M>&^E1jsn*B9AymkYIM!>{Run;+oy=iV>bbKR0H>tM*5b+DkO_kJn9rWXPG0I)9~ zH%YvRt~~b%zI|~0+p_ETNF}*R z4=nC3Id5QbS0VnFi at T@)v&G#?Zt|%05P!AE_s%KA{nNj4 zyaF5gIv1AALN}$oSoM*;xDjh->_ua=7wr^aFV3{9mE>W9Qw=ZgE>JdSniX>umG~?{ zP2?Z1SD1%G;1^5}*xkjrptC1PNnDPDrD!D6UzVtUtcy14pU2;T;mPu&UAL3m5>k=8 zk9ZgUvg~RdNrWi0N5y5o0;3h;ANc)KWrc@1Jn2NQ&-5%<>eNWVmiF9X7FHvZ*nZerbGq| zPQ2W45bH>q=jCZ=s;R)s>wpzmX2e6yat|4 at LKnoWkT7i_8LUFW#)X8X3z3aw%|>Vv zRiXLph34}lG0ml{z#1?m%F;O_tPb;YWyjRd9NBJo);){QvaLpF!QdmYp9|Hw*s at hd zD09c~^6=wKj)aj!reZR?I8yQ>b$L(T5cVK+(60X=G(%!iyv%K)@3pm(fM^NnU;9J&WL at jm|&D3sehJA#Mj>J(Gtj0vPL#sCi`J`>h zS!|lYLy}j78OwD3haljSR;xACqfYK_SJx}WjZzMbGsKpmtlAlZl~cyV86r7L1o<4| zJtH$gHbY142q_}Os5M1L$XFevBLuHe<>j`rm5z{sI!Z at K;s at dg;pL*vi_VY<#2NAi z at 2(?yP)#0%O97<9N-sH$zN^c`5G1Lxz5glpQs2wnNN*ciY8sFHh^=$mv_2YYP1!U~ zPlDLcf3z~rmUYB*C$fe(TX422oh?My(PqY6HceY4lUx|wiKB&JYxnAP72|3t#BovfHj)%NAa|!Y zk{WR3!{2l0$Z;g4v&*H$_^a{;<@P0hAk^VtcKpcjh@=?tkT%NKGs`Iql{LN{Bic>( z3dChvle%yi7m*}yDq(pddKJYe+IVF|qAEz65J!;=SSp~WQ-~-J=oI=(Dtm20G$!*UqTVVpx~uNIID=p0)1nmC6t$<#pIiij-YJ|Tn?cLE*pGmM($j>UxO zMM{Quv_%QW!R(q)C_u?n92tH`1sD at rqZTNcGB=@jg&TTSXxXbmEv)dUMt6l;GCV;n z{hAp*WNFzfNPbnt=jDEfU957UW-;xE`u7%$=RoY=$HLt$59-k}v&<_-}_!!Y`h(bu#_MT2Y zWMIb0ckrRo$%kuI4<{e8G~?vU!-|NXk6=#xe8l)GE>bU2cz-VO_3?7iBvX(DNlHTW z_sPmxO7(?)i|-1_2utgxMB*QOMp!V8NHikGlF=ATybO%cF_8R-W7h4B z?AA)r8!5o=ixbOQ?Tz##BS2VzGy<9srqk)qRCO}mNMhD7-bhSmypjJr?O7l3?-5l( zN)S_dBgy#p at J8CaR(T^!ntOO7F}m9uNsxHJj_h^SGdDhpl812XvGo>5%6J!Z9dcOCfhImotUX+;^W#mQadr>?uFJ9g|C(C`h5U}Rs-jb?aB3bpl%y}kk3Xw;tS!~ymAPwk#4Q at s)d1QH zH3+frJxi+d3Su=V+W`7XABgi)n3ghk((5zZa3q3OIY%sPWr?#|S=2yUcn{rmtr9Nn z*^QM?t3tJV%Rsq%>qjGnq6&HtK(D~IR67UpgadvlMbIe~7>5K)1n=oUXjgnqF(1Fh zL%+PMs#IDdZII6*J2v0@kz7i_|@12u)sANJ8&PLR*yk=q%`hyTp at 8r*z{`CB4DWyL(gJ 
zAUZ;sZ>BmQ?S2jDeqDO8;tkN9IP#?JO1krSW!|tt)Z|uZZFn_<6^p6}?Z?I4 zcs^W-D;KC{DE2bo*0`EslDrT6!6|e#fh34R1xV`36^CdLi{N*rOrh z=9XvvwTCVGL!8Y8t1g(vPuJtO+dQ};U2!&tC8~9iXe*2mQ>iwk>tIIdK!p5Qz8O at Y z at PHZ6Ccda4@i359S>ZN0`4901vH8dqyYzA2qocrpw-IkgFr}lr(TV(>+4VRM_k4-B zb}^%<9Ja^?~ThY^2 at k)m{+3hN9)d0I#06j96~;Ba1+1L$hLCyF681y}g_ILHV?tUg$L(=a{FYVk)_8eSa1}ZIul>unx_a(k_7^(`KA*wr5^2#Q zQ0afdKk!|K&PjCUz?^2GfWYmATz`-eCz;RVJcGDc^-c+=3^(&Mp`n31jgRVSM0xTrp7J8l&zZsa(hmeffMO#H?iE32I=tVmU zxXcHNP9pX2&vawn2?1AXh##W at 8o9~`^T=g=ELzYIW>n0-a|$JAA{i)N(4e> zJ5>8Ym8|rwqDPzJCpH0gcV7vNbAqMqAV~8=yCk8VHG(=6qms~@l&&PSv09)omh06B z8p%GPP(`8Oo*!)sv?JfuxexaJN%T4735-R3Uki*`E;=lcmZRVv#mVte=`Cz?poLCD0iiOd{(V0ZnQN zga@>A<+I^|G%&-N1(qr`(U}DlfufbvXnNL7RS&Iis(Lu~B6Bzlcm#@~aVyt>2ckF& zjB5n#=sU=(f#t)n2-d3C*N6L<<<>5PJnfhVRPw&7eDLF}oK-#@=(`pBJmKsm#G68Z3XjKbhSG;`FpK99di at QY3T7zM^Wjm-k}X!AJ9Tyd zsf3)`&TM8 at i0DE=ogbbn6m=jwHAn<4P+O|hrvpDIRgRxA5{-o+)1a(Y2|LgQONv7& zDEse%lco0mrJ{5(>Wm*f2ymKDKdVQ~K+};4(Pt%UQJ8LE4kx((pLIyvgXx=4em*JY ziN#)t4$wc;W4{fN{yW5-Y>_r_tBYJ$+^H!-#^qved%98^XrV2w!zKEymegb;HZmWU z>iGr0+_f6{-fYgl(jmFfoo$)J&&l`skR$xyneU?zWxuM(;pOfI?g0X^sg!tj0Mx(J zQhhp0G>{$yIIVI(E0OYr#3LSHGvaqPak9VnQ`lDv-m~b}y6TTk-0O9Rvfu$?dV=ULy6 at a8N z9$w>ALAg64Z&9P2SuXH14E|y>f3carI3VueFVHTAfKb(to53o1QbYMIE0G?=vE>{T zx5~N8%f-9(?G%8AizKU+m-i!DnSu@?7lv0jQ0fIGpbE9c=#8}5JQz&#Yk6*A-UhdMcrR_a~K$@;*`=Zc0BVQ!A4K0a~- zr8qfUelJ0iO$5m@^{iEil6Xy&B)~dWC}7A*6;lUtctSK$B{f>Fb}9a|9S{umbi5|M zKKL0g46&D9?a>P&KqMR^P~Bv~ZxssW#FuVmm*NT-OBYdhTFU~GfrW?PWo-eSE{mKB zssa7j~qp(xvO73CUMoInd at tLRD zZ6h;hONa>UX&ai8Q+;Nj;%Sj%CFFn!qdRLrx@Ogq~6-V?iNc)lL1bKP6M1(OapKP6m>XhV+D{3B?UXDkEx@%=l zNk&7~5NQ^Q{1LQCckVu)%sm-jaDbW0E74W>7%K$uaQ&4#doV}h(@6;t at qxS&-Ya(v zhzafF<#iH`{iWtJJ4c_o82$oI!DVR5Ob6PgE{W-5_- zPt8Q6pvm|0c?AOi=P5U+7wWd5;hB&r+R2kN>q(mRB+Yt~W<5!>o=mgvKi%y6Xem~4 zRWvjjOh#R3kCz9QsiOA?E(!x3%e-9N!uG_3cI8khzb)Oe2lt=4dgu!rwV!V<%k5}$fpdeq>ae4oO{?JHnJ at pdo8!~j2U>SNuJ5`AK z;92}1F()rC6%E#P43SBr8skY9XPYzs6JrI-ZK${lm(VF9${M4@@ff?T7d^SIJf196vvnj&6d 
zIZi=oJ5iS7mRc81Tq-pT! z{Qc(551s!7exI^V#hw<@AhHcKzH1)XIFg`Nr*@G!eZPir!d2Gv^o?eLg at H$KU#{Rf zO(O2P&*)<{XqM1h)EVhbXUJkDn}Z)PLv=a?)_!5_h9f=wS>r8Yksx zUVY`t at sw6k%L}};a^C1sX~^bOB20yxtS(l*zLZz!rU6zcj$F07k(M6iC*>7*8(Cpy z0m(c)pDX3LFeNZ6o5^aWC$b`0M_|@LB&&%kD`eATvW?I~_YLh2-SU&_C?;wvOwE)Z zqg;mDW_;+pT3&t%;CS2hPH$1q*)~iHI at iEoEmxJ>qKf97idf!#JpylYDgL6Yr44tjb-b|I^N;w zbTQqm&{ZSHK~JPFu3$q_wWgk21oW3DDFnv2I at 81_q3DgZNKfUL76El=pbq0-_oPg6 zvJ`sMscKC_sSemu`QF-Eax16Mb}+;+VF!$pB3^;xbc5OULZUg4-wkgN5E%1Td!X;io-r?uT=YtZ{s?!qgnmGeMf zU)c{DPcxbg($yvBlpjn)7DX5IhlCWpiTkRK%TKygVsmR>{Q7 zuXKQ2Ef at tGD>TcYB8hi at 8+kSGgnqR6e=`MtW%0yNOycD&=wj}vk}d2amvw!CFIkLZxz(VF2X}O-USXNj<0~0)-LJBzxGw4a zDr*p}qs;cCLfxdS7^GnRf4KT}6R!~q_JK>hCLP46Pw- at Oklc#G+!^r`W(4JRc#y&C>%%YVUy>MNj9Nc4=ygw+ zlqGfH;9~EL-y*krh1Y2SA9oT at lk}p*rvo%ayz>ZWe6~P&69r7uG{-`BU9K~F{xbU0 zdvalP?m0D6w`w%6T*0G)T=^hBdgpTJj8iqGTf at 44A7ab7(ndKOR at 7!{ze3azpww^h znj3y_u%pOK#0d_TYctkIyaJu#oy_TeX?p;@NQ!f{x==`?rsMP8?sr;{uD+EEG*2|D zK;2xt6lg0lD-^V=Z5Thgi#bf+>;58DNxZJel&(IfUMs7PDer=U8ezRtUg>e%2FV!D zraSiceE*_s^>0y^f>$CifJt2tWRe at GF<3N&EL43#ca}<&UV7KzAoa<8*%Th<_!K(0 z$PyFGfZo#U?iFQ1Jd0k{J6=;=Aof!UP{6NuJX>90@~0A at z-J$|9IJz0i)PAb{zn=V zqq{yz((EpFOxB|w409~)8RZBRSCF&$smb>KBntCn%_LkOawJ%&5; zgK~8TE;3Vd(MJYMk2`(1^w!?5Fs23&?mbVBODC5YNd@>O at iN7hXgF1gtmvbq>N#=f zFdAQVSn(C6V!L{oFYZAKHK7c|Stjfibr4#vyi~vi1^ax6B=iX=mH9r3!etubGUjy? 
zOv>|qFiRBiTt9kf9keMrj}Epz7$;q;nk3>Dd^5={(8(7lX38r;d$E>eb&9mbK4xq= z>NsOTFVL9 at agZV2cs`&YvVv$EkTe1leD5ZoT%a>`8D%(i9*IIFR&FWrF>&-U5m{hP zQGb$wGj*QGpkStkxP}c?L4{I*$(Fg&&Ja|fBtebz+ERrAU+ZtXNEYOKAk`&7G$>tQ zLjtO9(mL at Y4#nk_Jr_T-kq at v!rl$M~UAIpi$SbSJ&=k}B!qi$Pkx9A}+R#2`Tqhqo z%|C++N{5r6HEkw{e at DR+Q9pZB5 z#*{V)dWlabS}=lka$TH1c|*_`LLZ34<0P8Z{jL-H29%S|0+vF6l#&Tx^vUf~XV6c) zha(yZ#zG+2pOm3vXnV%MG;ML#6IMna&?TlhirUj0GcZE2)Rx{0*261-nMMt37 at CxN zs6~l=+7!)BIkE%J+!q)>7tC z)dRwg?s(s;B8^@ZHS|jK$HhLF at uU%({AMxOK<)TC;g!hDQShGXN3TBBh0C at coq0-m z4ozC!eMKj6jlg2d%dKXqFZer}LKGV?zDCk+r_|HU<-n&L7-Xo4yMwAH*9SUg88015 z>vxyqeq2skBM(Kl&N360$O5_-96(<(B7QiM%~j=da2VUnE8*DT?V5hH at F8vnfHM*yH9YD2`JnRX?^9sy?HSXu+=l#kH07I)BLGx~1 zm1Ruq-n*f)sen!QP}6n2gl#~u^f_AKqW<)VlPaI9<$m&_6KN6R7|PA)N9(E6xFk(3 zva59rHO?Ob+PkZx4>v=X{_6x}8799AGY6Dz at ta{8wbHA&VM5>Tu3NRXb>SS1*NxIQ zcJ;4C2I9B6OSN(;LqNf=w8`(|>V}-`6v|C|h=EqMHcNh}T7%_PFaQjuK3 at 4cGo9X~ zym^i0Hq0y12BKwYxIiMhYN04v z=b#O$ucRa&)>73$-|tQVrOvK4*y#-rN#lfy5Q=?-NO*6rqmS{@KVfiu%<_4>jpZ&N zL4Y8)O9(-3fW#A*cZoZPWmJIpN57lI=e_hbq1X@&EX~I$y%S=zi11?&rw?Yfw z at uMPzmtXm*yw-$|9szyAlmx<5BwUZ=svgO(T$$v($~HvZmo@^OeExhXNHB}M@@##q zlzyv^aWLry0VDLssNilvVQK^Vsl31R>e!1St$6l4IerYTmbV2z;-=AknHyKRrBs`o zV)uff99!Y39>8%uwQ^%r$g at nOP0ZGcTp}~TeT?*~ zwDMC~+F`upeZ&ey)@{(@@s<;DmtsYSSlGxCXIZdBT&X5l021h|0;cvWULI}`aq0rC zggsAo246egb#r=|ts||H9l-bd at 3J}Zn(|gOMin4WOk0_ab2|Y^YcN0AX%BOnEN)91C80O*7GpEWaSn}=Y!}8>d5N~l#6ov- zu2T&N3PlFYit~W5MGXrL6?q>&`a at C|UM17RO6j#tk(Z0S51+?qt at 6szfgElZ7G>Yf5K9-k3&nzQW^vc10N!Kz4nP9%c$8WEvD|U>E`IOR!5KP~1!@C;Q z)DKn!0T;FehtP}hZ2NhO-X4P%TKWfi3`DKLbP1)lM%Bp0&e3;e(UH9jweG}WzeA$O zeR5^R7>(kvi2I@$fR&cun{piyIeWH;{|S_k(f06L;fP zsrKZ7QPJ5&>G&~kuzT#33$eQ!uMvQji-b6t=w~BV89gzVn24maJ-}3TN3tP!`ddXW;3QuQnA5cI%%%~`G-8s3!6XY?B-vD~= zA+=k4+acUfL$sb)+`NUYglpmgArW-uDOTvMD};&jg?2oImmFw`^l6Jjs_i%8tDtOm z$j>^xTe)NfG at G6f(BpM$y>gcZ*`MH!_b5QDU27NFn7s!GjxVNPH+pUXBVu@^B&Y*- zLXM`-9#mCNUw}txyK?WgHsCs3)B)=UFZUHA5 at CpGtTeG#Q%%HuC&M4yfsS)ybBo%2 
zXH2IYB#y9n-FdR3y?u5==l~{-c7We_T~uW3eZcB>fNVEV)(Y`GKFjCluoxGv>Bc<}_e0#dm#UD_#D5b2JTS2~Og{AkYuSOF9; z|L^_jHF$YC&^;FE{@H^bZqwKM8G78?-_vcn;bXUHoibaoIvag-$N!7lG=QG|=)Xw& zs;h at n^o#DABlrKOQ*=+WipXC*QWF{UQ?lou^uN%A!9|b3Me}Nbt0lWUgjbTu;gyv$ zM+vWYu6lg2`}af`|pP zP0W<`{!P6&cx^{koflf=7kdRiOJLf8ZrSqg8pvv5-d(^PQ~tZ0H#(v!-t at vV5ShsB}Fj(3b^!RA8Ba6~?5m&IC1O{o(L#wVRtd_wnRamVc zVGap%NtjQ);_mB{m*X|ib4Z*j8T22CERKF}i7e-~9=)$!34_(B}28#^O9M~!7 zuWtk2x%koRZgnx6HGvypGT|Ekna%)9TL5J7Q=SnN(x^roZ za43LIf~t6a>>iw-t6+(=dnFOwlZfghpyadZEX at j!Stc;yYS-FJfdieT6nn!;Q>t37CMC}6S&Fz2b?%rEoZjc&44 at xcon(< zeGzd7>oSBcyM>Obdxh>?l+Ad(I`1iIN(z=^BN&9a1z`4pWezJ8D4QYCB+;%Gx0qr| z2)zR9Ds*=M73^jKYwAK#O#H$Sx`B-dI0|l3nE|BaU}ed>PE}Ma*%&Azmpf;1ow}$# zpy-yel_=-p+lpXsuo{F&!hSX+y0;y9a14k0CVGDf&Uv7x%=*x=` zb7?i54PB`6$A2<%)1Rmj-%{bY;8JiAS*?GAawlWF27MVhSd#S~k!*3A#J6^1WkvjC zJlKs@$T{rjKsifUuUrho)Fi)PdPFR?q4fTcJW|eKlFD;jWD0X zC3o-$3i>3W(M*M0KTjdo#J||mH?Fz$qRS3JJq*PfvMo;Sb0%80JEQBV^gMFWy=V|E z-HGqE=_NZ={_i(wK5CD#XxJVN zYRD^r0UH)+`<+MvEwb+KRByq*Zdbmj+u}l;@G9r|uN&v1`pNqE- at a>XRxH3$fM$TJ z0Hpv00N(=yUt^PeGcK`NHkX*^F%UNhzzBd1 at Zu_)^$4I2z(JY+;AN->;0nM!02<&e zfYB8;YY4yufEfUD0ipqx1FQx38Q?g;C4gptR{(u~XS4R*Vzaga?0`D5;Wrsz0l+MP zi2%a at 3;^EVWV8MTxCw9`U?0G at 080U)00IG~0gM0`1i%I8yuoHY2DlAS2Cx>|SPeh{ zL;=hNm6} zZ-5+tuK|7mCI6TqY%Xba#0 z5DbtAum<2efPDa!09ODS0saPf1;7D(`T|%0OaO4X!Hi*-axDFT{_X(w04HIL_W^7G z$Ol*k5DTyXz#Cu+z$k!00D1tOQ08xd+W?mUP6PZ3 at Et$_KsG=!z{1~|w#GrfT>u8v zvH!)Z9)&Kf8E0Hr<8HgK_AGW~jgNFyr|lRtgZ0b`j9AF?@jor=RX+HOHAVxpm$6yx z``9eEG+{U;bdO6DMp7whnfy^!>7y~9Pk3r at oG`;T+9NJesElRuVg#h7#wEx6E1rpn zOO8&22tjfbKTSx9(vs8pR#8x=GQL-&GR`YGj+)Rj!FK$F at wmZqAwZHaJv}@|7#5MT zj1{HAG%S*|iuFcA%RT;N#R#dew3HMoEH*)ysr)-VRTvo;o){LLk{BgS3rh{BVp&X) zu=LoJG%8X=vD^Z at +`Yp*eCGrPD4+f2`p%r|>Jz|H{_W=L^9fS5BlpVaCH~V4Pg0sy9RoD&0u&wXs!UC zy^__NRg;W4D z2p%NA9}IyjD_WEsN$O2bq4?oTq4MyEL?M)k6(%OyO&A}Qn8 at -VP4cM}J~Lu`)rj#& zTBVQVTl0l!X(?%Zn`KtZSOG#RBqc2Ym{ZNuqel14fVc=LugH{S at 03W=rB9{1rp1Vo 
zgvnI;$BB^UE=(r`WQsEM821ql*tt(9!askJFfG$7nG&W&herxWdHD&`(&N%8oIt%= zb4y`n7NLJ~r0`SKrKhK_0Ei}$vPZSB6M^=O=E|H2$PAB;@ zy;G87!aUMal7ds>qWq{djDw6}9w})_;S`exNF3%Do|Y~Q6s6*bV_et_s3wZkOa6BA zP_jqZ9AO6h?HdsfV;TmmE6YGunKsbUg+~kGNz)Vt9s{!UE!w0RQeb12WUY z7!F#>KR`jlvX zk}wH4duVrGs8}ICHBD$8*K at 8aF=tr8P>=9Pc=E{J?M4H8^eCw+G|DP!G(#&YE=kB& z4l~Qv#)j_`9wp?%^qQii$@tVv)&z(rD}Y-{YNj$lRYslWyHuE#kp at zU$Xqz}UESen{N%@U$o>2&7Vx%qpD^gv?A4rST(El2S!b^LV~nVq9bbe~xe&MI=`W z&@ovge$QJJZPE>v20a*U9bN-|kRfxti!{VyC) zjwtyJ39hV)#hR0%;=5k8tMNr*sBj*y)LoPXmUj0qO}In^;vK|WMr<6cKpElbWL*De zHL7}`d|Y=Gg#RfVS>;MO9tI;3q{&C!PvMKwg;Q8T$;xQugdYZ at F&8G|QlV8kvd%ER zwTO?AR_WF%o=i-M!97B(j0hI%qwFl3i8lPX!Wh2$3_cNiQ}|zy!l72_&b{YIx)8=k zn8{yCq@#OWdTL^LCXouNej&|~)omuI>zpmjBsGS9a=tO at 5#VHC4G(vcuukML1{+hO*pClBWnCx-V#jar-=Ab zDay_YlaS`I;Y(q;7ez!`y=A&K1lF3=0Yn%vHsC`?|S$hxxd=d3wzeu>Re at Xh0j3k^k%<@>Ta@ zp%%HyQsea)F at XH&LE`ukB8p-d>Z948p_=`OLmI3s$~u`Ur$E<0uE5gMy`;}$u&@L9 z3UXSxo^_A+!XApAwR#2uzKO!{bRkR{XddPd2<9~9Dw6uIp5W@}l^&EXOmj_kjY^73 zR>?;A?bgc(6Yyc$EJYn0mXj1ROrru&ixVspg07}g`;n5cvOtUBC{*L3kqqe+&Y5BS zm=vgNt}qq26NnoHLK35q&QsK}FpmgNjgLT0qMMH(x3HV1{}s7MD4 zYCI!^Ju8L$Jz0P(co^49v_NckoN|IQ`To5;_lQ?5>B at F`F8`!I|9{L|^|*XAa}nPj z;;6U&qnv-PX+m6lp*`h8MtCw=bQ${S^_1 at XXwXV~FV)j~%n4YgqvO(&Sggq2@(F|% zQAt=tpBc{&Pm94t1kpLkLU5q zU;d~xv}#5(x}z_c8A+fydL)L&D0NFvei?NacpOXMyk%GgdJOtc3Q=giLBk_6nyd{f z9rsiDBS~WSToxrSk4yb%b;j&0l^v+A01K-0&;QUpS#yQqQJFmiGnt#cG@}SurkMr1 zS9zcx%yGU~^hYDQG)|a-lOi$}^$?gsnV6Lf=tt95NP#SiNgoH=9kw?E7(k_!eL at 4v z=+?bvfN~6reOeg9wPC2crKKdQLnx)(2{x>)t&`H6kQccJxBd_ijoIZe=8Yuwo%K1t`Bj`-9`yCP*fH%4unkbl?cmy`E0hsbWVW7x~Ffi;UyETU{65N?Q{1^YvEEsWWeKW`D>ttWPM` z+x2!&@ub9}qcd*Tp1JGtrgZ-wiH#FP3m*us8Q#7(-}(A)`)!&=Okp+q=lC~V>>plt zuDIj)(BbEg1#j7S`WkQ7*^}E&vPW_AwZe`z8B|-!^pAYCBIWZw-m&w38ysHYY#m+l zt?Rv|+Dqy|-_O8}*+wE?iq<^l}SvK(vi??=&! 
zCp*#i?~`TH75S&$KNx&J!pr)O>B|*2z7~4ld^a!rsLxgZ;wcoXbmt?RGdEp#J($(7 zvC%o{{Pkbnb=YORW6LyqO8 at O5uSpW5CUe%0tr^P-~KrqRQS>JA^@(@-&|$Q-ufi=UTxj2)9sDqc`-U^{X$q; zt7o0tXtMm|ql06QeCc3bV(`P-`tJea)H!zmdvPnbPkz|BvM=P52ZX-6;A*XX0zCj2JuU_|E)MMAjXT4MiY zkMwptt2b;26m#ZO9 at BIT9}%ALpO{~88f*||M*X94XVr^Xe}sgV_TE8*w%dD>InM at RRPvxITsS61RWV! zYj!UPuG!@3%PnZS=yj&@dI8<`_Ht9l9|pd~`+c$x7O%AyZ at Xqu^p$_#%2~$4n*wg6 zb}akOwJkiT;ox|a$HkKyii<2Z<%+fD7n!d%+spdl?py1_-(R-h8ycGN*dj4e at nE{^ z&22^te*f7rcjF(X+rQ>}9NitBvA68khxGHm>R!^8;^r#h(F=F>9SjYXZ})H?btNGs z?nCO*g9)~dZ41Wm$LZ?mCplR3eQEFU;HXY$^^(BUr#8Zbj at K{8oGGca{aLimLNalO z&bRMMw+}d7m}`{Tyw}R1?dViR=F9XlPdYx!{DSM$G0pR|d^6kKvm<0*S^HL2TLj+K zC@|gNa>l4oOb`FHvdQ6`;e{Oz-W_L#ezSX%-qlTo{d8GZ7JAw?2&55jlP?84%m at rU zbJ{d<$Ijc+3-dP{ZCdA7)X>~#@0FVRw-2AJzx?)}g$)sDgCA!mG#3ZDY|r(bKd-M* z|AE6zd3RDBEPrqv?z}tUgYfZ!$1ALKFNZ`q)Xmv>aGl1Z?FD8-uk5y4{GoKrp@)_S zp4Cl1ZY}P+J at KgSn%^uvYyUP4s2lB({Vp~`_~=#U^v_RtTG~_Qx`QW;>C1Uh8Zi4< zq37JxW?`mnTlV6vmyiECSNWl9#k#s%&O0t&d23%FKBKel`(=TpM;wK__slL5JL&B$ z8dm$ZSU3OjL1W+0wlJf_4v*ibHzj>-R7tt at UGS1U-1YB@)WnJ$*Nn!Q4fdP0A6u`z zP|Vtun`>S=sK9l}7iSj47SS1QO-+dlhWJ{WhGp9uY_&EYdEbIHtuL$SRDi8`$F~s& zrPl+Bzw3XY<9p-3I6tCkL4Lh zLnG%jyuHkO{ILE?@s&4)xeYC|`ffU;KfJKyV(N|`C9Y?3Jwk_T>LfZ0IzQdiHqXdt z)QlqE7|p$b+2wCDgH~OR at c!(A!>UCc!;600ZFKD3CewZ9tiV4P*!sTT5)skR7?62& zfbT=E;Ow`48?76 at ylHVIqF+Iw_1rU?Ca$B;d{)=AL;K0g>_rDEg)@`ac_x0oBS1vk z_nop=M|XI9pe1*#aC)EC%-x%Id6s??ZNBc8kz)$Zwv=9e at l#=)JfitS`S7;KO=lmL zn!K;uZ8%+AFe3Zty6LAZ>ry*SFUL7~Jf5*UJ)WP%hk%PJ? 
z+qd`q$?nSZg0v5oc!4=H4{Arp=vFK2_+)cAht2k$pZC>__8sdHJ={q zvH1PyC3 at 3FbyzI1u3CC*-1is%8GqpC$)A at OexLgKuX#%j&L5J##qM63h1L3uvA+IG z)1D8Y1V7voExRYl_AQycvOm`_YiPu^6~9n9nd^tlSaxX#XZeRq)n6VSUz+%pdv;Ru zTZa_cw{MdZ*Aypw at gyUjJ=ZpNlEce5?#e?E(@m2iqkkJ4?q4BWe6!(K;U8zCqE2fL zi!O|7iQ!*v at Eq`ysn?v(0%m85*3Eh!+3b5+*J93hCl+|`x?ki|^!<5{L+#o#U(In5 zR9LQdcc1XPYx+E%%b3SrGX{Sxb at PjT9vH*63z}9c3emlNBKYg_&VV!frp$e0vcm6H zc9}oz_-Mg{0R9(io<`3<^3^Z%Ca#l*YQ7u2aDgCYQPSrJ!g9L;;$_ng%lb>A} z_jmirejjS;ufN}xxVdYuc93G6 at u0Wn%NyU#IKSYKTDdW+mAh{ANtSzKW9(A z*JzN?&=$S-c1dH~%~eOOZ*hK!sI$Ae<9dAR;~SxaEbEQC7F`&*Ve9$qF?XuHT+C{& z1cy|;=hZhVHqw2{_Z;%My#2yAFjBP_pZC$PrF7h{xFxOmeb9~aYu2aOS6wz8 zT9@%Y>D%!$#(p<&lx+RhX}@mzEih{Hi@%3$ypYqfp|hrR`?Ukv+cx!c*!olE+aE1c zii`SbWo(&s#`cG->o32znItLH37h@(qKAfS{o59~Kv^?di|a=Txb28n)goql51`O%#_Ub;>BY2SwxJAd3<_H$kO z`QpC at Y9BA#?NTE7ef0^0yT2cGI>I{^!Sgy2B$OVG9oBFr%+vI=_N#!Cwp-VoI$qFR zDsHhjduP#tb5AE0mErss#+06vUew+9>`-yvkj)X_OlWVOd at n4)>Xbvnk8PulUnub9 zW<|A}P0{#Dqv7Pa+q3RB#=o%3c+e7T964`$$UwihHoIt(?Z$7Hbnc&VvHZ&i2X@!Q zxN*}&BT}yZp1ShDMpLss*6&XIGJRW}*;ga!YlCKdlm7F5e)ZwnnajD;4o%qc#l^K| z#W7BLb*!(B#yR+XZV>il^P~3pd0%b*)A{;WYlO#JrrclP`^~lKqwF%)7`OiYN5;Il zhK)~MV$b<^{?s|P{rvXLo?~o=E$|K5_UKiL*UI(We`>OJyWBD>f1fm`Zu_LMCx#bZ zFx#|!$I3Ay2hRL(dIjH={oOYm#;Z~lwU0kJIpSokwi2)Xm=bzl$G4+9|civx>*IRP3C-#f|sbe$mg&C*KM78F|WRl#5nLb=%=zcIF#c3_5&w)yjq#$92o< z$98;{yk7J52&d?8zZd6E|9sukdFw|9E_Od|tJ7)lV2{s`)7PH=`sBfPU6PaIJz at s@ z5Xep)`Q(ge`;z9*y=!f9-rNoPOAxtr$Ml3ZHR2lsV(lx^cm@Nt&23Ld9eLybZCG&Y>M6GKrigXF+5I@#?lW%18pEVt zrxgG4(B*8!n)h0tkMAO5B0mE zUE?hlZF-&QIL3GLrtP6Go)7<1YhZ%U;_QZh)?SbJOJ33(Hg~OVfZ0XR_ zy1#87)!x?I!m=cEl*jJ~?7&zdh!P!DT( zzc(3wx2J6X7>_;!9fmL6l_QF-ZZy_dzPRh)wX`2<(=QEs{!7TrO=fL-9S?lCvZ#+h z`zyMb0?yt3UM{PCbL{J%1o{z1?E`hY(6R(c9-}{?aK_9{6>6-ji;xkLv zwfxcV)xs}6%$Io;O$&NHRC{%zd-$}Q`>sl>ukD?j?clodcIb$z-ov=s$Sid{kks3|FgY^ zuPnc6Ky3&=75!XWtC{#_b!;|0GYTKKMc4ndPSW{-sqr1($Rc-5+7`8Or_tT~37Pj! 
z{?c+wHe_>sQ@(DA_t at +sE6)CP(pu-+bA#6nK4Bacf0+N`<*6l}xn-`+gYMkSO1j at F z at 3>j)FTRmi(=V|oI4WUJ`G<%%%L|0HPfRox`bKIeHofIJM;3Ao|8Yv at U-XVMUUX}* zYwOQ3OVXxlwqBpjxjD1mFz>@Iz2byP#e3e)I9il=_e|~W{iSbQCNw7gA$YJ*bnmv| zwcoBg&mYlbv){j&HRWQ1f6lqO at cu)OcN7O7J3su|>5W at Xp54V8#Xh+$Oe>#TZP0Y| zh`!A7^OO}|&5QN!QxQJ+w~}aU=cV^tzty&?VXx_j0>G%7M%8E<%oo8)Z0E-G>Jb at sre$uj&f4 at JKzarxP;0LC6ti8UzvEt>so8Cg7 zqv7+W6#HM at S<0erp0Rl}>%lHp=f;f{O3MB at 38xZD{B3;mP{SVcHLK|Z){v^z+V#*TWrNo$vbu``ljyH at Yd~%n%@|W zZmKOhTvs%#reV+V`Ny0Gj6ZI;>crWFPUWW)4?I07=6`?gSczuIBLnJ4{r7+0n(6ps zea0cq-4Ukg_xg+fh}7 at 9A<8M&Fe!f8^7sXNT8*yhuh&)lsK+`uGD~OMk#^pC_pi7+ z>xXE(h>g>(?0Ov$5}G57xjZn at eo1nI_Fr8$&6i4UjBPQ#vwT6!{aIJuoO+P5wyf>V zfD>hIix028b^G>C`lsvbvmF{M&-*@jG57kJnCHKihQv%S*1qp~)P8oO;n+Ke^vu2O zHD|qYev48=dCis75Cas z=-+)Xbj^+Wo6m1Pnh|^dSm$SV#L275`v3mw)QB%q4re?xJ25jOpS$3(f)}3}t?hKF zkB0ujZ-mA)!V?rX`X^o!?9h9(tiiDU*&L2I&{6YPiW$cFI`el==4A%a%K|W#Rx~b-3QvvsS+}b|FEHA+G1v0vueY22vHPe8zie+t_;dQ=#J}a;gIye!~SJ;sLs)+ zsewy65`;Eq#=Ly}vu$OG#A2Q3Tb&&f2W&5WXOvrb+G=le=G3EY4(Ts574fuZnd=4f zta;62rnYDLYRMw9-79^qzq%V}QN6*mK%>y;jLWaXY4JISrb>q$7Yv7<>G0OuwELTW zg`2J}yu#8IG}wA3zm1S)JPf#W`b=ox&K-%S`GwPOuiIp_xw)apujb0$K2ILLt^eok z<@IS14GR-89}jj3EN-6fo4dWgQQvty)8PXx9a8T&4|n}R_#t8UipL8chg{aRnp5Wx zrLpedPP2mTkL-3|89Ju)!{P&$4-XxmUiWN!U$ONX-J^-Mo|eDW1(^Q*F56 at DBVk7D z=hHJ^*;{&^7_4hfar%y#G&`X5#az$AW0}I{)Wz9twtqc-+12%-^4zVubt|r1-r+1R zuz&mgI-N5|N&}be*)4Q*5*N)LRe4*4lH+P9^!Jya$XTEUDpoylUEX>!41l?1V+( zo~^dc33+4M##~Cse0n3*bAM;3dCEhNG2^3~?H{hZptA$a|45&g!eQ*AFaI>(TltGk z?z))K+jop;y>Rg4hvw~XZoj-T9afIBusl3T$MpG- zF&-0iY%|h5Qy(7pPpJE6xrf+3GW2N1<+lxU>K{Jly}43+rKKUa at X)5dvq}nw>;JeT z^(?li8{8X)6b7GGRlkbEt-)XxK}eM^KH3z#N}1194>rTG`wTcF{9l- z?lawV at 6SM%`Fmg61q~4qTdrmXGx~gtEN)sC7WA9A>CD{E&d}?$ zcQn;4%6|F(vG*QeO*CKM at X!PlkYGWu1x3LEA|N(Y5^88cKtM$#K!8Xfn1pJhND)!0 zfQ=$eiik>81nixrVn+}QcI*xHJG)6Jx$oQie?8ClzR&wz at 3-sm%x})jnRd?X>@wMX zWlBGNh-sGO7hsn?XOV(l&s1s2A}Lk-b%bT~S!snKj}z+5^7E3$d!CG&srLR#UFXY| z9XX9ZJKXL(eLHn at 
UDrw3!j``2agocGB^}-D-=bQ}xH9G|`Kig`)jxwm at 6&xM&uw1y z at r|AI{4Zt;0&Aqpj^rq0^~No`s*|Gn6Z_K`5;?tYzS!0qexfGzDhmOVT2Vrw+S|FmrfaOeG}X<@a!R(^2IAA*Q&dzx zjF}l+p+4()$K)A(t|sbBhp*RIKjGu-V}WNhKV952Xa0dh{(E*bu6KI&g)?bo8T-D@ z?x2)u69Si0XK at wZu>)@IHQv1Ke()BHD(WU#sj(ZI-Bd!txOu_kNhzV?*_St*zxUci%C~XtPQ at U7w#?PA|B1 z!^7ibx91eG<6fPN9cH(#5GiAF6fNu~Tbm~Zgit%&`>ZEOU$I5{~>BJ#Zp3XGf9zW6kowv;j9Z at gGfV!m))$P)DUmN3X7Zsnd z**N;q^7_p;S3O$Tvzlo4aHZ$#Xs5iGBWqqJ={nE&A?Cs|UbXhn9N(^a`)BI!Yj=XzxZXZK-|=qWHo1FCyBBt>|5W$zSl0FjpH$lJ z&$oDT%SYyRdy(hc8*gt{-PAZ5-r|-g-kRN{bN#9Rx;9niHRoJ@$)DYqs&@YF%*_|Z z8Bk6;I$S-oOaI5I>k$VU2|c^6SRFoiIpJK^#Y-E`Um8JpchyLedo9}6u(|QNa+7GS zM$+LlgGArHpyXL(&6MZrN@)d4_oaK-m+YGI;(cmo*tx8_3tuxgA8OvWU{u=v?;$xE zl`PfWK at y|(&OKqi=R at nJ9oyz<#apbE+$r1TlF+=J5f`@q)pqiid)ve*5wYi|Rz~je zh>dWPYY3ZE{xtml-PWxsSM8&g7r4hLj$0IclPz2Hs3rR#@pyW%r+8CIp5M2!m%$qk z&rq;9#41%NJ#>Cr;k)p01v)S1p;)Opd1r7vz*s66~8!7bf!en+qNG at rHi_upDjd#*zL?ItN|9P4DK z$1RZ<5%czjn)mB&Kj?fIq~qi+?_i_knAMZ7_5Hhi)@X9+*YyuKG+y+&PF|QT}5>@Fyx#7wjxidj#K38N%9^f4DzCaVdFyl^5wxtm(<5S=D z--&NLZ8kgHoj+c#R?VtP!azN*{O$P(GO at Yg_sW$QdfnPGt1UPw#c%S*#nZotMea%T zOq#PSc1DWR>k1=Rdzr72b0=9Hd|2?V`S`Eohp$XDYqr_%RrGqL<}bRw*X+b4%Q+qQ zzJ+PI8$Vm&vn+ex^~ej~PU(3ohQy6zKVCEcigTAunYI0IgU>w~)5^BTa>mYoJ61fx z;A`Ddwf!p3C#&vW5lq_NdF!x_bNS`hE2|c-c+%=(YO}fg_ba8FZi`izcVkSXFKR!3 z=D%}ig!=2u8;Q%m9f^4Cuy&Et+(m^6(vg#O{GQ52pOUIvRN#Bkpes+JFZz at sWyYMt zpRCQbrv!e8%Os>_%&HG2rr((u{PB`Sqgk0|+fR|kbtKRHFKI3ls52LatjS7Ce=_ym zdn+F$iBGpSyFPPk-MO}->g8$aowlKs&*yt1lRGVSDs{0B=f2Q#cSxAh{*AV0U8cC? 
z$%2y&BByR2s_ at erBPDG&k!`&vziGzr>J8C^kr%qS$?|uu%`B}yxIl?u5|$r0_4aws z0M}3vd3JNzugjcEdmZ!MO?f*m&GhG?g+;~$m#cwRZ-zC!+va}XX!;J1ucV%Db3&Gr zQYPuzO+Rw?<(~G2W8cm{A7Pvmuhc8&mSf3~)Ze?jeyf_^y|m=r^E5So*-W>u-JRLf z(HFmFy at GDn*q|ak%_c$|r9^DmQ{xkn;l%FFq`=kApGQ%iul)IL#nKFB#4wc`OKC>u zw>K4Uo?q15zJ;{F at 67Zy8w0l;JA3&}`^t&!tgf6R!#|9k^w{EHNcgBLj9Vo^az|@V z=QxXOKEH5Z(XTMoF}t6@?f;0K6PEi$dyXw!dUyTl*{qJ)sn=^M(&FL0M{1t#%9W*T z7=J`_l409YvmbsdHJ;!5d}g^I3G!hOb^;m)I1!DKTnyo0paplqyGl6CUt^qCs6c1zx_$%ZWgRm`-0+gQ%9|2)Epeca!fQte}0FePdhWyZhQmcTJ0jEOwh7b+~S_Jq8ls^i>>wsngt^khp zXB?0@;E#}B0>TbJW03(gWt5~2gad)}KzVkpy_~1fn)t04P*lN737zOunW*6 z!26(lcL;9=S_=3+ls_KAen6Ul>wwDv?F6z0+{YjP_2A#~-X&Y8v2T;8=e}0T}~+!5{xMKobG);gA0&pe2Cs^2gs7NCWV3;FvzR zotCl!{GC7kSe{YP{xkgX=K#StNT27AKh~!ZFI at l}OCJdY<1GD at KmMzMpr57F_~X9; z2>L;~oj?BIZBo!K=_=ruzU at F3fIsuc-w{Z0(D(-d%?EiCfBe0G)Bu+O$I6QVG6npG zKmKchVBDlL`QyI at Xc^!R{`j+i<^Zk-j_FGPq5}T)FaCc@$p3TDQ2*}|^8X|u|8EuY z|08Iq|G)be|Gy;U|3zr1|34S<|J_3Ve at DpwYtc~u|MoBbe@@8%^U+ZMe0kW+nvnk=LPP!kwUGbs7xMoHLjHde4fX$@|Kk5=g#14j4fX$SA^%Si z^8f2X{(lS&_5Tn5;{R8K{J#VZ_5YVb{=Zkq|L+O;{|PkI|9||8|6dUD|3Wm>|DOr@ zf4Y$W-xBiwYBbdUzx<2;Hw*dyVKmhL-wOGEmXQBH6!QNDG}QlpBY6oCq$nXGGD?Dg z#!HCE6C@;%l7xu(7zq-ZC?R4XFCm7;Nr;G!mQX@4= zLjG{buY~0vN05-j@(&v$LBsO9%1el2`Nc*{%*FC+DM-j-`DMmRoZ{t21cZH0NeW3K zX*3KCM-pfR5=Syf49Ox{&nrc!S<{U ziXXa;;z;FUI2>&ME#1#UgXO8=#$*lE?Z)V at 4IAGVc=G9-1fj$%D6!axeH~)uQ9Bf~#Ko7PtmQCm2KN9k#bG=!-?e>T$_~9}B zqhfqJJH8Q}ICtT~c^cYUbFrVav3-an4<;0ZU{u9ngonXsOTl;#hq0E0 at g<;9ND+-k zlhIT(3+bT6$Pn2fcf>)FC>NbU-AGh~SDw7kN#LIf{Bsol9L+z+ at K1R}LD<^~M8Kwa zb-?UR7IG>f6*L!-5DkT+Q%FQ4Ttq}vL`+0nWSEGQh>VE5$T$(Ah?a&b`{L>vIJTH zqz)v{mxXzDn)F(<5+dKA>`Ss2DTy=f8wcY?`$GG{xRcsKe6ffjf8e$hnx&<+Kg at 1I zOg0(t3m!J;UF#8UGhRMR3KgR{yR3=CS1Ne=2mJI0)TZFLgNKhN$Qn;O)I!KOD24RK z6T}in^M0 at 2$TI_zBq5wEgj0lYsu0c=!Z|{?MF_VF;WiRZ~7MIXU*6PCVhFnQCn-A>$^buZp>w-Pu>C z_^O4lLmuigQcWrBmx3=Lq?xb_mg$YF%SOm=={29*)|o`2COG?(yhLv82xrTlxa at eh znXsD+?Y))mW?(*F|MBr=Lc=DIGfdu~vWDgY#Q}SwshvPU>sko;^DzA_Zsl4zK|fOg 
z8}V>9NyIdIc^lzG6>?4AU}CA_yDKR$xs{Nt4EW*d8}35+J%sQ(y+*Oyw| zMyQ&@!v^(fmd;7g-s_;xyl1|_lce(Q$Yw&}C|>(fjCjE0w_qQ!kiMhz8+w`e(LkZK zA9rfn*oJ|~+Kt3DNA6$|nN`0p~VFO&hU at 81noYrg;Q;?#( ztCf(-;!OZr;{HEo55i%LkV)tEwGs|4gZfQP>eXxMu zP}z?8%eWS;chjohnkviZzY}$h&wN>$GqGm$JdwFpX?K_nm%NmB%Wc25S?|lpGv=-?6DKezikp??izZRFtc zqse$u8zFKJue~Qe*l1N|G!srlfW4e-8gWbrR|sMG9bF>Zt=b4tYw-T{LX}+>#xaHWIls&PN(iJRfWmF$t>Uz?1VC1`BF7t+2r-biSz4hg4>~ymWcDqY-tV;9d zDt?+cKNjUO?)-iR{=Aqszo|Y7U%2wqb;8a?pwD~$&1ph-x)4?q!X?LEPLSWYhmxn5 ze0l8IE0O9#^6oqpllESYY1}@OXB!mF4024mZ`ys)rc{U-bHn6RGxEB&icKA}&oJ*| zNs_$nr|98y%?-qz=G5{`divoDbIO$@KBwBKt5S+rwYqGL4qKRYvav?>wX<>L)0+!r zs6A&D-xk%si*ThTZ>8?2c3%8EIHqNRkta z`F7xX(>Th`*Db!4aP%5BAH)-gjr(5LN`e1QpGjdh!QTQWwDdQfC6~VLtjK?^$aw zrLvW9tOBs#&u6K7)~#GB(n{E&%d>~aAD4E$hwEF$SYG)$q94K at R<{vy7xU)3o)cs4 zTJ5?{E&n*VvUY@`#dk|L*Py&3rg1wLnJZ%HzV2(I!jQ>7B at _rl)cw(V`Ss-uP0_2N6Ch??DX7N$PLc2lCc)2 zq-`)LZg#q-L+evVDe0HE zL0fNatzJM<8F~FkqWOs;wJVM17VI(p at ylY9?l6P=`cj#4hxN&Zd-kXs)jJyGjduzS zy!GsxiGf}9m8Ih>>#aSmSys1~&blMRO4n>Pj{RNU8eP-zpgMlaPeNI at d9>x&4 at tvo zel5D%P%-T6^T4j5J*H at UU> z4C$E5k!33ioj&%YXct&}-ZRS%c^Hr>A^m0Ei~NXdcg~rn&FES)XT7zb^^Juhzt+8{ zL?k&ofp< z4(Hi!iQc5WHtpOlwF%nwuiOg%YnEXVNN;jum8s`Kh~mXnW1xk_Gs z-MoKI;K*6e8`qN(cS;oyYN($V$KAiPwOZ4#Fnkq3t8k at zko76$XQFGYtBo{|lr7Jw zE}v`L{_d!wSzgn at XXjsRQWO8U{e$7e_v91_uZ|JweNz*nm at ZS~Tut}LYoWHZwsv+s4 at p59VVo*w$ zZ|^z_qiv44LCZvma)meP?y at 7F7{`2;&G%VH-Bji7lGt#{nsRg>y;}Qvo3g~~cVB$7 z4wx2~&A6_3zSr|n*)3uY^J=b4Yuvq=Y2<@n9{Ve3 zUt+t=M>8#_bWmzqjby*PU2?zW(eYCc<#i3~AA}wlF(q`re2HzQ?AM2s(i%gpPu{^R z*lli-!_Spl0F}u)MJdxW8P?;qLg;|xSU}huJk@{ zb9B?EF6#pQ8FJ5BizgdMB%TeK8eqBA!sF at puj_qM4`oJ0X&y2wRF-C3aVWT!*xbg@ z>8qyhk$G9(+gG7~An at VCnHSxy64yBxl^S!RN~50CRr at rH zvR!#|;P;I7$LsIVI_g|RskueWh;1&;;)#!}<}XvA8z*@zJe%xdvSF{K$C8 at KTGYM9 zpZ%w<4QUi#G;!03jxnTN&2whlok6^-(wk=+u%XB-%f}+-fP7toy;-osmGiSq!WpsM zHl;NYntE#r`)sPIrLzy#7tb|HvG-fJ;any%tRJo#@v=;Ly56*gj)Pq*EvpWYjH6B0 zy`Ys*C2m{An1)qJixSUPqu<)jmiteX8&+0mu%B2L=PA!VcX>(Wee2{~KALf-Qgz44 
zy~@ZS8d*gfd8Vg~IS_90So_W8xyQ}JsrfJ4e)_s}-mGmXI&W!k^k|uH)nSVF#*s}Q zL?~Cyqs?yKun>Qnu*P0A_jiYdNl6dwqQ{Puy{+ at blrGn(kO~(1nre(qpIO|fH(S9e z#-gFcvgP5Cy$d%Tk=@X?FU34v$K>iZQO%9ROB7VrUHnDO^_G3^XDXtZRhC$^I6KTb zwQjefoa9LqLx+#%g)gl`N9_pb9=OL+*7)dTSeQ|5#a%i5oKKvsZFlVh^D0 at V zc#Y#*HJ;}`c7FMIziCa at C>haM+KT)6OUK7lMw?c=Jvs4=c3IhouXoLwpITFEkR0Q- zb@$eiT?;e!O&&$9Q8PPg!CcQ}O{=W?9P-ApdfI*6$D%(?GJ?Y#sqt<`C*0SIPYpXF zU8zoWW_;ahz0>vG#M+I{7bYopY?-?~pORAJ(`EUcVz{TodaaH(jk<0B+-8&X{gmmq zD4SIDC8)cLf1bW%YU1D|$2dlNzL;DUv%s(6w)VoS586|`%>zie+>=h`%SQxDdK9e< z+N^I at n!=bU5hdHR_x#9fkJdjiNlRRPEz7a^m~_IOv#PXC>TYs`$5<(iBJ^|qv77tn znH1c(tMgNSR%qJlIa|2D?~)GRo)Db>q3pnuwWdivPRFfx1hjH>Yp#8MW1&7~AzETnMw&{J8X|$7N!TF^N&L9wzaMW4<`8 zk(254cO6FUkpG at wk(WAbJMrjL$q%XXWsEC}4RdFl;cA`RG4p)=hS^3JhnpvTZ~FXc z-_IQJ&O7Zf*Jc^*cXS#4lFDL_U&w90V=HMLY3+3>TK>cSh@~oFKkKNLv5F_uTot^E zW~pACwsFsLYFKCJ4E102}5Fu>q=tjw(a+lmrChKJwUHubS-k*jOn>DMVEwpLzo ztax5#nnOJMP-kApqwWvqUz4^u$C}!ox+GPN8d<2^m-lefgYEa8R~*+}Xjyr6Y|hEq zqzTKdoYMlgJ6eZTJ6LJvRl0EV^v92$Zb7OUrQCFR>5|*V&J873ovTexSjSw6InnCY zHnuy#R&Kb-zDZ{9IXC=s at 7iT2GiIn5G^j1N?9()7i1BU<`=1vziwwo0Lh^}1c$^Sc z62ip4V7Vb!MM#c>%9j(8iweo73t at So^fE$nX(2pPD1D<()_WD3CPMKTgcu?-mWc?Z z*ILApnJ}}7aOerFuZ at D1$Oz?^6~ZHgv5>yu!dNK1n2=mT2rCGsA1Nfa5yI1i at N(h) z7Vb|WJXWYYSD#Ne7q3*7uf(2TX*7hk%%!eW+R{Qel8Qf1Z>Y)=&DYg$CG6V`_Zzht z#mM2vv at s$rga%pYznX__N>SNP7ggs`Dk7HMo~pBms=IEezCIz`!WfFP&q~RU3T!6q zd54IH{n#Iu2-SC~5H=9PMnZVCaQk~N?{*UATPH1gE1 at bE>KCx#+ugD~HVYj9=kRcEqQb9}&CP^dEf}9uesryy#Yi$A%rg;3LqvQf=oeQXA2?ShL_*v>D0K>b6W`8Ch+ukEqfDr$FYS_tIflYgs^{a zQ01e|1Nx8`8ha=PumKOhyK?N*y4V5yUI=#!>D#;^-p-{B>Oc&|#rCvYQN~o>C`tN3W=5BT)<$Eh3`!KJ6%A=CDBwsma7*>?57E0Yuv3=zyp23Ysl`4-ic&Z1sNRTrDVm(JIE0WMy7AC##o8RW#B!^)SASom#IMtoU% z%ysK}_Ds_UDLcyZzxgPc9P70%p0ws#wU_r1 at 8*=JUUw8oG48t+XTEV+l{4+o#Z>J# zq;UFrMT?dRjwjS!-Cn$mY;a__dGj6B8+M<=2c-534`SXMUSS+|Ird{iTVJdyu&TRN~y*z7kR+6M)iRRvMu5)ElLbji(8+&|?QP|nIm1ks3 z7VmjV%de>zMM}!|IA3>JS4$_+&ELIS-8|n*wmNLs(_6P&Qg0US*kF9DqG`?X(h;#@ 
z<%iJoljn_h&S=R>vFnoRo_t$F|6q|-u9m6le%;kM^(hX&J7=3&Rh}~l-=uu4R{Oyr zvwVG3Qb~7Z;k3C&CF!0ceqHXYrQ`(9a<15soPYPHcijESM9cV>Bu~T2f at y{C%Ec2? z>x|O6$Z5F>wvT)ZmsUkhW*O(~GaHlnJ!<>1dd`-RANrJn3!A&FRh>vDZtk1#JR`}d zVC3B5tLz&6b?cP1C%1pFNG}^*KO_2tUe>12fIUg$NV{*{J1TYYs;9Ks=RF=y(=7{$ zr at q8X`E_qIK9b8#I!M`;J1nPc*51PHLeICOzpU;EH3ht%hh;Z)My{1{Cp9R$9_30* zGAOHfWPIib_}3iX-^U_n+;pa-!hC`~FAh1>%O?D$+<)O~lpHn2!>+(^LJQ}|sYS02 zSX3&+-%E_xkY at u^D<|(J=?SpiNw{`vWg;hF->J at H@OuwHg(MQhS zKl7(b)y*#)A$`sgk{t7+K4w|d#x7kMs$LXT6Y~D~@FQ>w3 z6&`7e=ATus`<)$O?`c at Cwlw_N&S9rF(lk#HrCSMmMqted1 at EG9X ziKTth9tVB9B58Ph-3doEt^@6{Z)?b#5*33T`;!<)Y3+{W4?i7zf3^^I#ex4^%1+!x z(QsXKWG}hEPifv`s-a at i59Jr?nb8)B&oi^)T;3kexUl4G>#i3TiCQOiKgw97e0NW) zW&Sj6^JCSz8{Vw$_E}lLS<>=^Y<0MCxyo^}^N!Uc(_-h4H&};_;*jjZ&zwms&}_UK zJ&Ce+DtDo|xY0q+Wj=DP8I5KKru^vOKKgO)L`G8atE(EO^{dZa27KC-gv`a_vh&n==p7cdwfML(ZQ%H4G%Q{8f_ zNJ-|f^_c|UvL|U>>n2zv%A7ZoD~?sy+&p2noW at Fn$Vrn}5jxJA%WJY^E9jeh$V-fnUT^C4rH^jaml335M%Y_svhCwPQ$&1>WjkrV6I`v6t?Bct#wt9}d%r#YmGW9j zMrzW{jr!g!HFjm&)FrztQW`0(|iVl zvC!t8{Jiunn`Rkr3*a7fQonAmxBM%4^SynBQSTm0t{JuV;?!$(TRYa?vn`}8M zxFnXn@#di$-lp}U`_p!5-*HuL`}uU)*Z|7D_=2qQx&FFjPlDd4Tj>^iBe`3PTEulO zP`R|qs3g;(aYS-x>gar&(kPbMAJ=vbxc{9>61Y2p>DH+ zU5ksd55JQQeX3u0B<`Kj4#T#-2A>HnmzO1O`qnOQ7O9=e6bX7{nmS>X{_;!JmM44{ zr+koUO5v^_u4>ml(Y)aLja2;~zn0wG^*)HDp=4cCrafmVTe;Ht?rVwM3`Ntpd*`0r z-970@`pTc at eX?TBV#HE=dzn96+5|Nf at Qv&tOMC5l65%r;;(P> zLX8ep6Q>#_87Q0dc=_&IU1PPc?m^cavi-Y_&XTn|Z!9w?+1-92RDNseD|!D;#kxkL z^m3C6msV%q*Lp6kyKohZ_jlfYuT8BTq+{(g^6pKs=jALWI<-8G&z|s|T#T_qX-X=8k zELTkPm?za}nDFanV6>f_ho6>#(Vkl^X0cE6rYEcI{ZaO#t$pmfc;k3oF^7zx18w0M zBmEANXHwI@#(nIZen-RYwsLES=SQms=41zz-Rs^Z%YK#}=iP2{%&DE`|KPFbTFK@` z_tt+m5AR)e_UHTjnuZQ-_sST5lf*_B#|o*u%z2x16waq+o7FvAw~uk79c58h>T6}? 
zn5OT!)U#yC at L_2p_w?v#ttNX5ulr}d`u6(Au%7jAcJ!Gv#0Qxj%qbhyR<(`3V{N8I zb?4M;ch+x;KkMUhD&u_;^?>+GjaL8t%6D29hZfB{M$Pm37+*T?QA?#}*O?ugz8Y0M zkPA9y<@;EN^QO1&>0Z;UX$M_T>*ntK?pHw2T%c^6tUF0g;jLEGk>`arZwKvnK*Iak zEc8Cq(whiH$9elLTxaj#?Z>;t+fOGVl-^r$vQCl21FIUpyWLjzcgNXnn3p=^ae74D>ojiJO^{~)i?PF*awO&)EF(I*|MGy7CYICfYf z?&Xg+^<<+kYlHnv<1x>Tgs$If?28VZ&aNP*OJyHD{vhZg<(=BaQ!CW1Qd}G#Z!EjR z`SI58>Sx1qCTU8`Z>~?Ry?SEm zl_NxNMy+-AjRIfPvhhun`?vM&53ZYKpDG*HQ at -NI1Zf?=uWbgVl`fiuR at 2bg*)79A zcRjgcT&di(I&1q3-TA6UPraG8)YJ#Ff3@q>$bbWwEXwm-Za!Y&WlX^GHEtX^~M1s<9a%*@^f6j&Loz7`S7TsQRO(tbjNWWmrEuuw`*PKwc7u%+dRpx z*eBw7$g1HB(py7 at U$n~Bc^x}@PL4&_`VSQ6_H+N|hg)jkd`VSXVNe-igYI zj6QjymlWImI(NMaeQT74+b{=jSEC(W2bEMRCRn)EJ7kVL*G$+m8^7K*$tut8RjIcz zj4kJQ^`*Vl at 0EEMRJp{HQc0{nN4 at USE@p5l+Q;gHbzz0XFwe!B-cwH3N59wV1b=dE zop8%pUHo`ZN3uS_Dkq;jZ>C*m_e7OrceXs4ZdCkM>)^0M*VCU>E>$a!H8g8@`{?3C z!{R5Wymd7S^{Na at Rj>EXx#Jo-{;FlN8$HT0+Oah#*>*}*NxVvO at vC`;;W}Yk+H_C9 zDWw_ot{l6}yvROiRsNR7$TIl_>AQEuT4Y-JJ&)gIdlz#$>=6d$h1j!WaUj|pe?;@0 z`EVuopbv{!B(%U&zRQ6+(6aepW0$ z`vAHl-NW8Hz=X}P!}p+@)BH^6ZdNol$)9aS3o#GyGY?>r0=(?#{#19aH7n4Fj=wY8 z49-PRk}v$vLA>{kd+^WMh={ECC$T>!gnuGDehwZ#haZEG4Z=Tzg-CpE1T}XD@?9yCFFN*~c6_{9 zf&F1HJ{A at PJLC!?!7KwfP%*GjTn88{K9FF#415f+w_s!Srn|8NXuQ|bgMH&c%#1-l zrWezR>B972Iv`&NrUT25<;C(K at Qwhs2MyN&&Vq-)Z`d4u9JrAmoe#jP at DMl{9s$0_ zf_|#~4{69i$CjYXl)n-Z(QkCTuz#<92i|?<-6YHXRfu zF_|pfFD(t%+Ec8IF>bxW%Gj1-h{st`t&QzTwkxr)q5Vo5V}4pgs--2#-WU&)EP2Ht zW)AN=V{Kgq$Q8mKFUe?E90}R;!>TM2GUJCkqe)1gA3hyPLc08LX#@$0h>D3z43m_S z9zH at wR&Jy`L1EPBF=G|SDJhSiFmcjkqRN!1s?(;csn3`>Yqo~w9Id(9I=b`p<}X;d zXtDm1rOOORWJ4ok6H_yaxrL>bHPvRht)2Y}2S=xst5!R&aarq1b948gdwO{@*7-1f z{aF6%*&J>_U{G*K=!T7(Hg7>|HD at F2#qn!_76UE8=1Pn&Mz}ZofmJhLo}k0*gv}Ie ze9k1|ZI?yE`95U{zHA5Q;50m`9iqU9nf{Fgn-8eq4}^7;POqRs+(J zphmp83!B4;_{>JcXD&KEr(z{qLI|7PJ at 7bBT(2*l)*D*Gfe;O!k$E#9HqT*mZ~*SR zelQ0S;eQ(5@<3d}fG$kVz$G-u$3UU{beLBU$Gx7XpMlQ~49H2tYs)K>2&Dz!`Y|~+ z-?Q*iF_}M{%>EoajhY~#;H~6o^@Ugtj=? 
z*D at MjvY?+d at U~%niOuQQTt!6caLsWC#6~ml-ovcGy}wR_`;+#!J>`ej5wjlNb%%}D zg*_yv5nd;3CTHO76pXoGytVts+YfSZar^MYd&>*jN`w;N+KylSFs(f8OyIoM3Py at 2 z at g3Bo!sX2U7rXlFc+&^Vfdqxt{E at e9IR_-8-+yYlq@3*})}>hWJJZ(e_S z{`g2?dIKRn8()E7I{wac1im<+QJ_!vzfk|RxK=s}#%n%@Z&5sijhtW<|62dKXyG5n zn+2uO`8DSGGm9_BX2C$*8vkDJP&>kEKxW}<^ ztsebzfX~0`|F77~(2_jx{-*J at 3-k{7A=alX+#b~NdDI)f<`D4}H+_gT{M8?|293{u zUOy}a&jztE^TjcDNd2 at 1jn99s7h7#(_XYxuoWIzw)}Z;?6CVSf_wcSb{(u>HP9lDF z^2cM?I2P1`m&apBUtkAzKNnU^#$St|4jlX3cuNr(PZ0|o+gGCnoPfn2B;&tzjs-pn ziwBPF18OWJBW(YVKX7GCDe&>Y5(~&^0&oN1ldyQ;lYuSBC*v!emOL_20p59ljIfWN z)&a-XuGzptfo}!g2Ye&&- at u)LTj9RJ#;?=Z^%rZK13vc)W(7ku3r-^R+|bpPC4{g$ zsDb;Z;rxEk#7~6u@%tYZ^eL9gmw)Z$<;HaR;Jq0H|1af_&Cn8z`#nMh*KIRI+akQp z>i8WSW|h1h80vljOeO?}VgE$1-{*=AL&HSn#f^=PC9q?j7Q#OgtWI2D<0L7C=4)v= z+DIZqBw|B`q+GpS;Xo+ZKNt=v>~#W?ND6;T1@@zKERnlA2d%I-(O3XsjL+4x#Mj;{ ztSN?6BV%M2!h!o4!RJ)9bo3B*FK7h)N`kT}f4LXH4ha}`!5*iH5Ekc$(*}iiKv;?& z-Z?0oFescjD4aAXoIEI;GALXjIuNcL6vm!ONb~d|!QQO at n-Ah&PWArtud=_MW`Fsm z`p|Rh-<(5JgbzAJ{=;){{)j=R=zn+$@)G~2yZ`@yArxAtQv_U44aXt3-*ZZf~!L-Bvw-hp-t$MANrhJ{B&ZjFkLiQN{rJ$}c| zgv6xel+;~m=^4BC?A^CNGb=miKyF at sLE*uo;*!!sWrxd;96eT1Syf$Ad;CP*$@+%I zQ>V|IJ$L at X#Y>m3T)o!R+|t^1{l?AqTet7ry?6gX$HUI9N8OK~Jbl*l{Kd;xuiw1w zefR#u$4{TXeEs(QN8it1zyI40VE*|5%YXC$=6|~V|I_9FFMj~`&+`8x`ai&r!KHg> zaJV!s_L)k=$36@{FAtV4LfDs at 86Gqb;&dXHn1#rDJd{}HcjUT3YvKj6) zB#;pY$%unwOjaPx2iCy>bT*fz0nu=u$JW5UV~WfDu{YK5!+xRIyG?>LLg_Rn&CdgQ zv;4fENbEE55N6_WUf7qIiEeBLmxg~^m+t3<$GeBp at n^~cWr7qmUpiiu01igbR;UXn zh=D)+<^<6_7#wd6Plg|^gX2%bK256;NcW?M2GE(99zk3HJHRV|7SfM-?IGgr>5rqc z at o$^x%1MZIl4X|FN z4XHDoN#xL&fiw>mdmzp|Udgu}s((U7;uG9J+=Zn-`N1%hP`Y=a%SVEl+<1&dKN? 
z&M7DZ=jJFL=c^DG=XS^m=k~}F=cdSjuUD7PRe_t>lF?|K8zO{rOVnF~aXZur+{l=O zuHxJf)!^I`9mKgE%BbO$9}ApfZ-+Jlx2D=6Z$7ujxi!)UZeveI>R5U^J2VdGWF&=i zOZ2fC<5bj%b3=3%=SHXk=O!o{=f)@==cXta=Vr(o=T^uG=hnyw=MjJm2rJ&h(eJs!f;HyX` zNFKNa^e5uukCi;!3EbYEiq{t_ucea58!LJBtl`&JMn+jiQeID7MoL3mOk74 at UTUHg z)zAi!fUNuHPV4^p)RZpRi{OaA4bF~>fB6}U$39rk`${&&&lCHsF(z>2<*}r3xFmNj zBam*x^2a`84{-cnMM!T$b1 z{{BOr=NKX?vIbg#?2#&9dn+5bTludaVO?b8+Caviq1f6P+0mCrO3R)knru6ML3i&# zqs`cjgyAA*tlv1!>);aZ&T{r=v#=iIIP)%~&hEUAi90hHZqC?C0Ry-UCdb*GHL{vK|F%Fh0Ykj-)t)F~qRw{`pV`#1LJ58dE%0e%=`@JBf{mM4GA z$H7AK?CpQfUmE{s`Ha**%YW?pvpl0>&AE-TLSDf4KSQ{6TmAT)&5R4f?}x28D&w<=h{T z9lHO|>3(z$$RxWA{;^(_gJh=$g at xPK^M+S{?8qM!u7309c3(;Nd8Hb z)L+(I2>+9wXJVv3mZLImsJul;zCmfIT)6!b;|J=|s{GIOa2)^Va(WH^EPrqIXZcZ^ zq4L4w-r at -B7RP^<6P-v%)#=arAN-5larIESu%CZ+{_{ADr2TXKPPma!liQ!m5Ayx9 zej~p>%g3|+EdO!w&*j&?`LmtO=>4<)`gebp2YvXn{+_-+%PW8Wx&FJQ$$ykzQTnr- zGV#yyvPplIpCJyF3)|15X at AbYb2?9s9qtgWxiD}YcDXEy)U`xW&!9Nr;wml; z#GP#tMXK7O=^?d}?0vi1Ed`nmqzohl^!2(ZdJ5DIR1dTh%7_5s0(k)0 z0_g*30mVTXlYq+uA*@}{CwGCa0#yMO03`x#1Y!cEfxhFAXD4tj&?+EnjKi34fx>{| zf%1Urf$jsn0{RZ5&?<^%0_g);133fv18oM{36u>~1#}VUG0<0_(QVMTKqf#AKun-$ zpk$z8pwmECfo=o60Qw9h4)sw0ngFB?WN at 7~hQIh at X#@Rl1f&O417lnalnN9J6b$4I zv1=L at oi`os9k6A)A@&@PJ|x}-?jb;CNVPJwx75+r^k6a(;)vVv?m&!r??K+k&uPTK zUUto5hd_=eB5+4y!eG)#TrQj87Qm%*Al6aListXYpa)SsF&Wy%&xrw$_%r;xNZ8-2 z4Vf$s-3(UbkY*iL8kWGG;Y&x4kr9hy1X(a-&+3mC6*V1Fr`SQ7NvK_Xg`e+`lC30| z94?zl$1WRQVpai6E`uBbT3L<^54xc at jg5>&`S)fd4-YoJS6pArl11~dWVo?ucpI-H zBRZ2VD5Fu#mQM4)l_O6fKK&>j`Kih6ILr5HiN}xa6=54G^__2u~rxcu-SCK{w?%p z2gM2U?#A-+fItJv8?Y}dAJkyfK&=MV9YH;OS%E{WX)$js7=gU}*h{!6eqgkuzsB2P zPwFr$8vR%8aPw?oY-?@YZ^?3^1NX@!G#eiymOJ(gP+KgB;}4p+o+OkY3av3>dQrG^ zUwe8mlp6#wejWp3Ko_+IOQ3P+MutcQ(*nE<88JBi*h}IE()EejvuS=D>_zVUNGM6v zhEDexNOK<<`+3;VplF0PW4d_T9<1olFn6e#93s=b7=HW|d%*@hdH4Ldts+DaTjB2k z1 at IpPpogM%-mD-2^5u^L4`btQ0d at C+J7GE-A7s$bBZApE3 at L}jCZX}7R;>QV3TPiz zBOb7Y;X(66ZCIQ^*64xBZD^P=AU57h{Eq8SqK5YmFB1F|>sww?Xoa1ztzcyI1ak_7 
z%;19SBJ`B+ZM;V!2o+#CvEeXeF)@#?5yb#C0GrFUU^23g75055Y^<1&Ly%8jFf*_) z3$o`UI~J4SL1qVFPb5u^DEw+LvA$UzKxc>GyT?h`vH)W6>B)i~f_#U{vF=3HSe&t6 zfG;G#B3v;{^{4ys at g@ul_NL=G7T`MZYVgx6!D4vLg%}bt9l-pGkc=EiHiBxRNg^;? zGnvN0ba$9(NyyI3*ivBROk~Jf;m7b}a2Yh-b7Xml;T1>m<9!wZp`{Q<8I*>_r1SHK zi1yncHq(+g{c~=!s2QE+k55Z*$;QIGt at YDH1WD{!0~rxg9Td--(FB at a31-JZPrAq< zg86TU80I0E1AEvo{ORa}AhkXAS%x>@FPQPdB*Ormc65Aq5JC&ZEa|jB`d{LA;d3LD z%9}``|IgzViJJsJrVVe6EuGh%al*4F%p*!7gMUVJ8T*X{7 at J_akH+Uj-a{ozh98W` zbzCD1`=A!WN|ra%)7abqe at x(r#KkBazE&37gpU1^0%kTk2lJ;!aeF%3m*F=siNZAq zu58|Tr96y--Rb_AUg#S9nv;*s!v8FWl8oqX0bX8owxEO;(3gYTL68t!4((a}*GQBh z=)qMiKl+gNu<>@=U~%ZJCS}7`=f5r z0iO~wM3SF}9jtc at Spm@aR{q>3Y^+2P6B?JsB>DSe)?=dPL*dL{GN0~H+bHS&zv4GlluC at 6|ldC34_+6j>wK at H6Hwh zC^8qUGog8ymcPB7{MQ3+lYft!$NQ#;44;ehd7myB^#b+i^045*(%;bKrN{WcrMF{w za)Y2@&Q=U}Hj4wunYR+)IP*t|znpYlfj>s4!PNtMA6p&h!S-O5>hkv8?$hP%!`-OM z+qdhX%iEu82^{-OmOgMr;Hto}yOq&cJrMS(GBWzALq<*G3!4 zP~a05ufmU?$Hyc-HsoU)ejZzXyd9rA^7XIem(AqIW4}Eo0%6lT_FDrszplgh>BIPS z4(Ia-zC4mI-^$mM1cdF#&*bAAetq)z&OG<>MrIeIe+x|2>ENKOQ_e z at gI->ZB_hVdi?X|PoDgrHz)qj*8Bg>>3dgp~8fsRSU|lM!-}9*u-^d zst5-`S+pD$F|`&iX>8JmCT&w6#3F=^52TV-328Lgq-pwKQj!uwh#@}MNKyh#+pH1k zZohwK&YsKe0c=Tn8}iNNKQr^s<^Ru~fwLbcJ9&B;^}p^N?^PW7Jr$Sz%k-%Lo zi2C36-0OLs{n#_vQ{&wK$xA#}g%6Ud at i)g6wr~8<`1;iPKa5S>eaM*aq-aThhoXD` zWOsnw1MUKMfH7bc7y-TmZURFj0B{-T2F?Q?0_T7Zpb-cH z+kka|0;~Wsfw_y&0Zak+fIGk_a0~bbxDH$e`hg3;Ip73vRMxKtfSQ0A7GXJ#=dNMI2Jw3j zHy{6M94qL&REm_&PDjNt!&J}3xrffB$TJzeo7xRIonMBK()ncsDb=4$(~nCeDRK&V zriIhx^x3r9LB7F3UhW_dI>;L#@9??#zk!vc+n6KfBT&oavFJmczOzHmp`)p!jg5AD-o87o^gPlElmIVR@a^m!16W>FIP9kW#_^UG1| zJ*zxoe!0;opI<#`^?m}m$Ro?r>EmlCwU*0#wojK+?6d1rnAka*CH4&aDf2lVT~2Y1 zZGMl)6oie0$VLE}-&?KA5eN3z)>D{XT(`+M*`>@6Ms-`I*rpE#KL<-sx*U~sP$DAQ zNvpo3N60k69YR3&3qnjOu(cDGucva3rg0A<}7K zspfqYW&QSI(#I`75$7q7W4g5p8!~J&oj%#Cp|4Dj4coNo^YT6FBM;DZNlm0Rw=Zd% zHhtOLCgAKLU7z#0{8hza3g&le+ z&sKR-?K8EV at OWYKA>>g=;}6p*?c|4z at O8>Zhpm1ePx^wTUGgK&{9$L$Pp7tf%tkc(J8U_7z;f86 
zo1P(0n;7KwNOrPT&O&vJ^Y#(1P?O}{tCi(dy!H_rpPbJ27vW4=t(;+StK^p>)~sEe zYs=uj6Z~I-%PGQHgajwlRz8C|&OoTwHFJ*%emrCT)^QhFCYzZxfcd_F`JQ3F^ZA}K zE at sf%daUwEtY0JRWv)!I9*IPY?MXWWee|M_EPfcEI^slN-Jh%_-Jcc7{n^b|LNJsO z4T|ysun8zU&=Q_?j_PUF4cJXok>^RPgEMOXEh2wfGJZ5ZBX~6XqTMvC1 zd`z8eyJS^VIG0!>-!Uc0nw4q;OX;IA*sX2@^Z606z8#e8j=Fa^*@a;JY)JYod3mkm zPZPbhN)|p9S9Fi8lgyI`dEb;f&U6k?!~dOaQRC~Nv$koRkGjq at qX6fHJDhUISlpe! z+55Q_VJ*9`md~=E+&VXR8~W4OJ`?Ae$~jx<+pNsQT_&VHTaPa3A2ICdl-wU at yR+*s zS>3#TQ0ceZmU{_p>w$x9>1(=OvQ}!Gx$gkKr?*+|Ec4%yk2&qhB7I-nw^#G~_LRx@ zi)%Ga4IgMeu5^T(TeOn=NNdX`t*%iG*J^9S`ujC&>yCss)oS6wc3-{{u5D>*P&NG} zo05EA;kx|I&n#Dz;LGb2URV3 z(R^9FP>X at QrnaliN;AbJ`L$X_OZ$<-YHPj{Y4X?6i(4i64Ykdhn!mZY$f2>JyvSL5 gagk-L#YM?Uqg-61ujl4Uf2Duhwu)V4!Jy3i0g*_Gf&c&j diff --git a/distutils2/compat.py b/distutils2/compat.py --- a/distutils2/compat.py +++ b/distutils2/compat.py @@ -1,10 +1,10 @@ -""" distutils2.compat +"""Compatibility helpers. -Used to provide classes, variables and imports which can be used to -support distutils2 across versions(2.x and 3.x) +This module provides classes, variables and imports which are used to +support distutils2 across Python 2.x and 3.x. """ -import logging +from distutils2 import logger # XXX Having two classes with the same name is not a good thing. @@ -20,7 +20,6 @@ _CONVERT = False _KLASS = object -# marking public APIs __all__ = ['Mixin2to3'] @@ -31,6 +30,7 @@ yet does nothing in particular with it. """ if _CONVERT: + def _run_2to3(self, files, doctests=[], fixers=[]): """ Takes a list of files and doctests, and performs conversion on those. @@ -38,22 +38,20 @@ - Second, the doctests in `files` are converted. - Thirdly, the doctests in `doctests` are converted. """ - # if additional fixers are present, use them if fixers: self.fixer_names = fixers - # Convert the ".py" files. - logging.info("Converting Python code") + logger.info('converting Python code') _KLASS.run_2to3(self, files) - # Convert the doctests in the ".py" files. 
- logging.info("Converting doctests with '.py' files") + logger.info('converting doctests in Python files') _KLASS.run_2to3(self, files, doctests_only=True) if doctests != []: - logging.info("Converting text files which contain doctests") + logger.info('converting doctest in text files') _KLASS.run_2to3(self, doctests, doctests_only=True) else: # If run on Python 2.x, there is nothing to do. + def _run_2to3(self, files, doctests=[], fixers=[]): pass diff --git a/distutils2/compiler/__init__.py b/distutils2/compiler/__init__.py --- a/distutils2/compiler/__init__.py +++ b/distutils2/compiler/__init__.py @@ -1,11 +1,26 @@ +"""Compiler abstraction model used by distutils2. + +An abstract base class is defined in the ccompiler submodule, and +concrete implementations suitable for various platforms are defined in +the other submodules. The extension module is also placed in this +package. + +In general, code should not instantiate compiler classes directly but +use the new_compiler and customize_compiler functions provided in this +module. + +The compiler system has a registration API: get_default_compiler, +set_compiler, show_compilers. +""" + import os import sys import re +import sysconfig -from distutils2._backport import sysconfig from distutils2.util import resolve_name -from distutils2.errors import DistutilsPlatformError - +from distutils2.errors import PackagingPlatformError +from distutils2 import logger def customize_compiler(compiler): """Do any platform-specific customization of a CCompiler instance. @@ -14,10 +29,10 @@ varies across Unices and is stored in Python's Makefile. 
""" if compiler.name == "unix": - (cc, cxx, opt, cflags, ccshared, ldshared, so_ext, ar, ar_flags) = \ + cc, cxx, opt, cflags, ccshared, ldshared, so_ext, ar, ar_flags = ( sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS', - 'CCSHARED', 'LDSHARED', 'SO', 'AR', - 'ARFLAGS') + 'CCSHARED', 'LDSHARED', 'SO', 'AR', + 'ARFLAGS')) if 'CC' in os.environ: cc = os.environ['CC'] @@ -68,19 +83,16 @@ # patterns. Order is important; platform mappings are preferred over # OS names. _default_compilers = ( - # Platform string mappings # on a cygwin built python we can use gcc like an ordinary UNIXish # compiler ('cygwin.*', 'unix'), - ('os2emx', 'emx'), # OS name mappings ('posix', 'unix'), ('nt', 'msvc'), - - ) +) def get_default_compiler(osname=None, platform=None): """ Determine the default compiler to use for the given platform. @@ -101,17 +113,19 @@ if re.match(pattern, platform) is not None or \ re.match(pattern, osname) is not None: return compiler - # Default to Unix compiler + # Defaults to Unix compiler return 'unix' +# compiler mapping +# XXX useful to expose them? (i.e. 
get_compiler_names) _COMPILERS = { 'unix': 'distutils2.compiler.unixccompiler.UnixCCompiler', 'msvc': 'distutils2.compiler.msvccompiler.MSVCCompiler', 'cygwin': 'distutils2.compiler.cygwinccompiler.CygwinCCompiler', 'mingw32': 'distutils2.compiler.cygwinccompiler.Mingw32CCompiler', - 'bcpp': 'distutils2.compilers.bcppcompiler.BCPPCompiler'} - + 'bcpp': 'distutils2.compiler.bcppcompiler.BCPPCompiler', +} def set_compiler(location): """Add or change a compiler""" @@ -127,8 +141,8 @@ from distutils2.fancy_getopt import FancyGetopt compilers = [] - for name, cls in _COMPILERS.iteritems(): - if isinstance(cls, str): + for name, cls in _COMPILERS.items(): + if isinstance(cls, basestring): cls = resolve_name(cls) _COMPILERS[name] = cls @@ -139,7 +153,8 @@ pretty_printer.print_help("List of available compilers:") -def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0): +def new_compiler(plat=None, compiler=None, verbose=0, dry_run=False, + force=False): """Generate an instance of some CCompiler subclass for the supplied platform/compiler combination. 'plat' defaults to 'os.name' (eg. 
'posix', 'nt'), and 'compiler' defaults to the default compiler @@ -162,9 +177,9 @@ msg = "don't know how to compile C/C++ code on platform '%s'" % plat if compiler is not None: msg = msg + " with '%s' compiler" % compiler - raise DistutilsPlatformError(msg) + raise PackagingPlatformError(msg) - if isinstance(cls, str): + if isinstance(cls, basestring): cls = resolve_name(cls) _COMPILERS[compiler] = cls @@ -200,26 +215,24 @@ pp_opts = [] for macro in macros: - if not (isinstance(macro, tuple) and - 1 <= len (macro) <= 2): - raise TypeError, \ - ("bad macro definition '%s': " + - "each element of 'macros' list must be a 1- or 2-tuple") % \ - macro + if not isinstance(macro, tuple) and 1 <= len(macro) <= 2: + raise TypeError( + "bad macro definition '%s': each element of 'macros'" + "list must be a 1- or 2-tuple" % macro) - if len (macro) == 1: # undefine this macro - pp_opts.append ("-U%s" % macro[0]) - elif len (macro) == 2: + if len(macro) == 1: # undefine this macro + pp_opts.append("-U%s" % macro[0]) + elif len(macro) == 2: if macro[1] is None: # define with no explicit value - pp_opts.append ("-D%s" % macro[0]) + pp_opts.append("-D%s" % macro[0]) else: # XXX *don't* need to be clever about quoting the # macro value here, because we're going to avoid the # shell at all costs when we spawn the command! 
- pp_opts.append ("-D%s=%s" % macro) + pp_opts.append("-D%s=%s" % macro) for dir in include_dirs: - pp_opts.append ("-I%s" % dir) + pp_opts.append("-I%s" % dir) return pp_opts @@ -258,7 +271,7 @@ if lib_file is not None: lib_opts.append(lib_file) else: - compiler.warn("no library file corresponding to " + logger.warning("no library file corresponding to " "'%s' found (skipping)" % lib) else: lib_opts.append(compiler.library_option(lib)) diff --git a/distutils2/compiler/bcppcompiler.py b/distutils2/compiler/bcppcompiler.py --- a/distutils2/compiler/bcppcompiler.py +++ b/distutils2/compiler/bcppcompiler.py @@ -1,8 +1,4 @@ -"""distutils.bcppcompiler - -Contains BorlandCCompiler, an implementation of the abstract CCompiler class -for the Borland C++ compiler. -""" +"""CCompiler implementation for the Borland C++ compiler.""" # This implementation by Lyle Johnson, based on the original msvccompiler.py # module and using the directions originally published by Gordon Williams. @@ -10,10 +6,11 @@ # XXX looks like there's a LOT of overlap between these two classes: # someone should sit down and factor out the common code as # WindowsCCompiler! --GPW -import os -from distutils2.errors import (DistutilsExecError, CompileError, LibError, - LinkError, UnknownFileError) +import os, sys + +from distutils2.errors import (PackagingExecError, CompileError, LibError, + LinkError, UnknownFileError) from distutils2.compiler.ccompiler import CCompiler from distutils2.compiler import gen_preprocess_options from distutils2.file_util import write_file @@ -50,12 +47,8 @@ exe_extension = '.exe' - def __init__ (self, - verbose=0, - dry_run=0, - force=0): - - CCompiler.__init__ (self, verbose, dry_run, force) + def __init__(self, verbose=0, dry_run=False, force=False): + CCompiler.__init__(self, verbose, dry_run, force) # These executables are assumed to all be in the path. 
# Borland doesn't seem to use any special registry settings to @@ -79,18 +72,18 @@ # -- Worker methods ------------------------------------------------ def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, + output_dir=None, macros=None, include_dirs=None, debug=False, extra_preargs=None, extra_postargs=None, depends=None): macros, objects, extra_postargs, pp_opts, build = \ self._setup_compile(output_dir, macros, include_dirs, sources, depends, extra_postargs) compile_opts = extra_preargs or [] - compile_opts.append ('-c') + compile_opts.append('-c') if debug: - compile_opts.extend (self.compile_options_debug) + compile_opts.extend(self.compile_options_debug) else: - compile_opts.extend (self.compile_options) + compile_opts.extend(self.compile_options) for obj in objects: try: @@ -110,9 +103,9 @@ if ext == '.rc': # This needs to be compiled to a .res file -- do it now. try: - self.spawn (["brcc32", "-fo", obj, src]) - except DistutilsExecError, msg: - raise CompileError, msg + self.spawn(["brcc32", "-fo", obj, src]) + except PackagingExecError: + raise CompileError(sys.exc_info()[1]) continue # the 'for' loop # The next two are both for the real compiler. @@ -132,72 +125,53 @@ # Note that the source file names must appear at the end of # the command line. 
try: - self.spawn ([self.cc] + compile_opts + pp_opts + - [input_opt, output_opt] + - extra_postargs + [src]) - except DistutilsExecError, msg: - raise CompileError, msg + self.spawn([self.cc] + compile_opts + pp_opts + + [input_opt, output_opt] + + extra_postargs + [src]) + except PackagingExecError: + raise CompileError(sys.exc_info()[1]) return objects - # compile () + def create_static_lib(self, objects, output_libname, output_dir=None, + debug=False, target_lang=None): + objects, output_dir = self._fix_object_args(objects, output_dir) + output_filename = \ + self.library_filename(output_libname, output_dir=output_dir) - def create_static_lib (self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): - - (objects, output_dir) = self._fix_object_args (objects, output_dir) - output_filename = \ - self.library_filename (output_libname, output_dir=output_dir) - - if self._need_link (objects, output_filename): + if self._need_link(objects, output_filename): lib_args = [output_filename, '/u'] + objects if debug: pass # XXX what goes here? try: - self.spawn ([self.lib] + lib_args) - except DistutilsExecError, msg: - raise LibError, msg + self.spawn([self.lib] + lib_args) + except PackagingExecError: + raise LibError(sys.exc_info()[1]) else: logger.debug("skipping %s (up-to-date)", output_filename) - # create_static_lib () - - def link (self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): + def link(self, target_desc, objects, output_filename, output_dir=None, + libraries=None, library_dirs=None, runtime_library_dirs=None, + export_symbols=None, debug=False, extra_preargs=None, + extra_postargs=None, build_temp=None, target_lang=None): # XXX this ignores 'build_temp'! 
should follow the lead of # msvccompiler.py - (objects, output_dir) = self._fix_object_args (objects, output_dir) - (libraries, library_dirs, runtime_library_dirs) = \ - self._fix_lib_args (libraries, library_dirs, runtime_library_dirs) + objects, output_dir = self._fix_object_args(objects, output_dir) + libraries, library_dirs, runtime_library_dirs = \ + self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) if runtime_library_dirs: logger.warning("don't know what to do with " "'runtime_library_dirs': %r", runtime_library_dirs) if output_dir is not None: - output_filename = os.path.join (output_dir, output_filename) + output_filename = os.path.join(output_dir, output_filename) - if self._need_link (objects, output_filename): + if self._need_link(objects, output_filename): # Figure out linker args based on type of target. if target_desc == CCompiler.EXECUTABLE: @@ -218,10 +192,10 @@ if export_symbols is None: def_file = '' else: - head, tail = os.path.split (output_filename) - modname, ext = os.path.splitext (tail) + head, tail = os.path.split(output_filename) + modname, ext = os.path.splitext(tail) temp_dir = os.path.dirname(objects[0]) # preserve tree structure - def_file = os.path.join (temp_dir, '%s.def' % modname) + def_file = os.path.join(temp_dir, '%s.def' % modname) contents = ['EXPORTS'] for sym in (export_symbols or []): contents.append(' %s=_%s' % (sym, sym)) @@ -229,13 +203,13 @@ "writing %s" % def_file) # Borland C++ has problems with '/' in paths - objects2 = map(os.path.normpath, objects) + objects2 = [os.path.normpath(o) for o in objects] # split objects in .obj and .res files # Borland C++ needs them at different positions in the command line objects = [startup_obj] resources = [] for file in objects2: - (base, ext) = os.path.splitext(os.path.normcase(file)) + base, ext = os.path.splitext(os.path.normcase(file)) if ext == '.res': resources.append(file) else: @@ -260,7 +234,7 @@ # them. Arghghh!. Apparently it works fine as coded... 
# name of dll/exe file - ld_args.extend([',',output_filename]) + ld_args.extend((',',output_filename)) # no map file and start libraries ld_args.append(',,') @@ -276,11 +250,11 @@ ld_args.append(libfile) # some default libraries - ld_args.append ('import32') - ld_args.append ('cw32mt') + ld_args.append('import32') + ld_args.append('cw32mt') # def file for export symbols - ld_args.extend([',',def_file]) + ld_args.extend((',',def_file)) # add resource files ld_args.append(',') ld_args.extend(resources) @@ -291,27 +265,25 @@ if extra_postargs: ld_args.extend(extra_postargs) - self.mkpath (os.path.dirname (output_filename)) + self.mkpath(os.path.dirname(output_filename)) try: - self.spawn ([self.linker] + ld_args) - except DistutilsExecError, msg: - raise LinkError, msg + self.spawn([self.linker] + ld_args) + except PackagingExecError: + raise LinkError(sys.exc_info()[1]) else: logger.debug("skipping %s (up-to-date)", output_filename) - # link () - # -- Miscellaneous methods ----------------------------------------- - def find_library_file (self, dirs, lib, debug=0): + def find_library_file(self, dirs, lib, debug=False): # List of effective library names to try, in order of preference: # xxx_bcpp.lib is better than xxx.lib # and xxx_d.lib is better than xxx.lib if debug is set # # The "_bcpp" suffix is to handle a Python installation for people - # with multiple compilers (primarily Distutils hackers, I suspect + # with multiple compilers (primarily Packaging hackers, I suspect # ;-). The idea is they'd have one static library for each # compiler they care about, since (almost?) every Windows compiler # seems to have a different format for static libraries. 
@@ -331,43 +303,35 @@ return None # overwrite the one from CCompiler to support rc and res-files - def object_filenames (self, - source_filenames, - strip_dir=0, - output_dir=''): - if output_dir is None: output_dir = '' + def object_filenames(self, source_filenames, strip_dir=False, + output_dir=''): + if output_dir is None: + output_dir = '' obj_names = [] for src_name in source_filenames: # use normcase to make sure '.rc' is really '.rc' and not '.RC' - (base, ext) = os.path.splitext (os.path.normcase(src_name)) + base, ext = os.path.splitext(os.path.normcase(src_name)) if ext not in (self.src_extensions + ['.rc','.res']): - raise UnknownFileError, \ - "unknown file type '%s' (from '%s')" % \ - (ext, src_name) + raise UnknownFileError("unknown file type '%s' (from '%s')" % \ + (ext, src_name)) if strip_dir: - base = os.path.basename (base) + base = os.path.basename(base) if ext == '.res': # these can go unchanged - obj_names.append (os.path.join (output_dir, base + ext)) + obj_names.append(os.path.join(output_dir, base + ext)) elif ext == '.rc': # these need to be compiled to .res-files - obj_names.append (os.path.join (output_dir, base + '.res')) + obj_names.append(os.path.join(output_dir, base + '.res')) else: - obj_names.append (os.path.join (output_dir, - base + self.obj_extension)) + obj_names.append(os.path.join(output_dir, + base + self.obj_extension)) return obj_names - # object_filenames () - def preprocess (self, - source, - output_file=None, - macros=None, - include_dirs=None, - extra_preargs=None, - extra_postargs=None): - - (_, macros, include_dirs) = \ + def preprocess(self, source, output_file=None, macros=None, + include_dirs=None, extra_preargs=None, + extra_postargs=None): + _, macros, include_dirs = \ self._fix_compile_args(None, macros, include_dirs) pp_opts = gen_preprocess_options(macros, include_dirs) pp_args = ['cpp32.exe'] + pp_opts @@ -387,8 +351,7 @@ self.mkpath(os.path.dirname(output_file)) try: self.spawn(pp_args) - except 
DistutilsExecError, msg: - print msg - raise CompileError, msg - - # preprocess() + except PackagingExecError: + msg = sys.exc_info()[1] + print(msg) + raise CompileError(msg) diff --git a/distutils2/compiler/ccompiler.py b/distutils2/compiler/ccompiler.py --- a/distutils2/compiler/ccompiler.py +++ b/distutils2/compiler/ccompiler.py @@ -1,18 +1,15 @@ -"""distutils.ccompiler +"""Abstract base class for compilers. -Contains CCompiler, an abstract base class that defines the interface -for the Distutils compiler abstraction model.""" +This modules contains CCompiler, an abstract base class that defines the +interface for the compiler abstraction model used by distutils2. +""" - +import os import sys -import os -import re from shutil import move - -from distutils2.errors import (CompileError, LinkError, UnknownFileError, - DistutilsPlatformError, DistutilsModuleError) +from distutils2 import logger from distutils2.util import split_quoted, execute, newer_group, spawn -from distutils2 import logger +from distutils2.errors import (CompileError, LinkError, UnknownFileError) from distutils2.compiler import gen_preprocess_options @@ -75,15 +72,15 @@ # what language to use when mixing source types. For example, if some # extension has two files with ".c" extension, and one with ".cpp", it # is still linked as c++. 
- language_map = {".c" : "c", - ".cc" : "c++", - ".cpp" : "c++", - ".cxx" : "c++", - ".m" : "objc", + language_map = {".c": "c", + ".cc": "c++", + ".cpp": "c++", + ".cxx": "c++", + ".m": "objc", } language_order = ["c++", "objc", "c"] - def __init__ (self, verbose=0, dry_run=0, force=0): + def __init__(self, verbose=0, dry_run=False, force=False): self.dry_run = dry_run self.force = force self.verbose = verbose @@ -116,7 +113,7 @@ # named library files) to include on any link self.objects = [] - for key, value in self.executables.iteritems(): + for key, value in self.executables.items(): self.set_executable(key, value) def set_executables(self, **args): @@ -145,15 +142,14 @@ # discovered at run-time, since there are many different ways to do # basically the same things with Unix C compilers. - for key, value in args.iteritems(): + for key, value in args.items(): if key not in self.executables: - raise ValueError, \ - "unknown executable '%s' for class %s" % \ - (key, self.__class__.__name__) + raise ValueError("unknown executable '%s' for class %s" % \ + (key, self.__class__.__name__)) self.set_executable(key, value) def set_executable(self, key, value): - if isinstance(value, str): + if isinstance(value, basestring): setattr(self, key, split_quoted(value)) else: setattr(self, key, value) @@ -173,14 +169,13 @@ """ for defn in definitions: if not (isinstance(defn, tuple) and - (len (defn) == 1 or - (len (defn) == 2 and - (isinstance(defn[1], str) or defn[1] is None))) and - isinstance(defn[0], str)): - raise TypeError, \ - ("invalid macro definition '%s': " % defn) + \ + (len(defn) == 1 or + (len(defn) == 2 and + (isinstance(defn[1], basestring) or defn[1] is None))) and + isinstance(defn[0], basestring)): + raise TypeError(("invalid macro definition '%s': " % defn) + \ "must be tuple (string,), (string, string), or " + \ - "(string, None)" + "(string, None)") # -- Bookkeeping methods ------------------------------------------- @@ -194,12 +189,12 @@ """ # Delete 
from the list of macro definitions/undefinitions if # already there (so that this one will take precedence). - i = self._find_macro (name) + i = self._find_macro(name) if i is not None: del self.macros[i] defn = (name, value) - self.macros.append (defn) + self.macros.append(defn) def undefine_macro(self, name): """Undefine a preprocessor macro for all compilations driven by @@ -212,12 +207,12 @@ """ # Delete from the list of macro definitions/undefinitions if # already there (so that this one will take precedence). - i = self._find_macro (name) + i = self._find_macro(name) if i is not None: del self.macros[i] undefn = (name,) - self.macros.append (undefn) + self.macros.append(undefn) def add_include_dir(self, dir): """Add 'dir' to the list of directories that will be searched for @@ -225,7 +220,7 @@ the order in which they are supplied by successive calls to 'add_include_dir()'. """ - self.include_dirs.append (dir) + self.include_dirs.append(dir) def set_include_dirs(self, dirs): """Set the list of directories that will be searched to 'dirs' (a @@ -251,7 +246,7 @@ names; the linker will be instructed to link against libraries as many times as they are mentioned. 
""" - self.libraries.append (libname) + self.libraries.append(libname) def set_libraries(self, libnames): """Set the list of libraries to be included in all links driven by @@ -312,36 +307,35 @@ # (here for the convenience of subclasses) # Helper method to prep compiler in subclass compile() methods - def _setup_compile(self, outdir, macros, incdirs, sources, depends, extra): """Process arguments and decide which source files to compile.""" if outdir is None: outdir = self.output_dir - elif not isinstance(outdir, str): - raise TypeError, "'output_dir' must be a string or None" + elif not isinstance(outdir, basestring): + raise TypeError("'output_dir' must be a string or None") if macros is None: macros = self.macros elif isinstance(macros, list): macros = macros + (self.macros or []) else: - raise TypeError, "'macros' (if supplied) must be a list of tuples" + raise TypeError("'macros' (if supplied) must be a list of tuples") if incdirs is None: incdirs = self.include_dirs elif isinstance(incdirs, (list, tuple)): incdirs = list(incdirs) + (self.include_dirs or []) else: - raise TypeError, \ - "'include_dirs' (if supplied) must be a list of strings" + raise TypeError( + "'include_dirs' (if supplied) must be a list of strings") if extra is None: extra = [] # Get the list of expected output (object) files objects = self.object_filenames(sources, - strip_dir=0, + strip_dir=False, output_dir=outdir) assert len(objects) == len(sources) @@ -358,7 +352,7 @@ return macros, objects, extra, pp_opts, build def _get_cc_args(self, pp_opts, debug, before): - # works for unixccompiler, emxccompiler, cygwinccompiler + # works for unixccompiler and cygwinccompiler cc_args = pp_opts + ['-c'] if debug: cc_args[:0] = ['-g'] @@ -378,23 +372,23 @@ """ if output_dir is None: output_dir = self.output_dir - elif not isinstance(output_dir, str): - raise TypeError, "'output_dir' must be a string or None" + elif not isinstance(output_dir, basestring): + raise TypeError("'output_dir' must be a 
string or None") if macros is None: macros = self.macros elif isinstance(macros, list): macros = macros + (self.macros or []) else: - raise TypeError, "'macros' (if supplied) must be a list of tuples" + raise TypeError("'macros' (if supplied) must be a list of tuples") if include_dirs is None: include_dirs = self.include_dirs elif isinstance(include_dirs, (list, tuple)): - include_dirs = list (include_dirs) + (self.include_dirs or []) + include_dirs = list(include_dirs) + (self.include_dirs or []) else: - raise TypeError, \ - "'include_dirs' (if supplied) must be a list of strings" + raise TypeError( + "'include_dirs' (if supplied) must be a list of strings") return output_dir, macros, include_dirs @@ -405,16 +399,15 @@ 'objects' and 'output_dir'. """ if not isinstance(objects, (list, tuple)): - raise TypeError, \ - "'objects' must be a list or tuple of strings" - objects = list (objects) + raise TypeError("'objects' must be a list or tuple of strings") + objects = list(objects) if output_dir is None: output_dir = self.output_dir - elif not isinstance(output_dir, str): - raise TypeError, "'output_dir' must be a string or None" + elif not isinstance(output_dir, basestring): + raise TypeError("'output_dir' must be a string or None") - return (objects, output_dir) + return objects, output_dir def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs): """Typecheck and fix up some of the arguments supplied to the @@ -426,37 +419,36 @@ if libraries is None: libraries = self.libraries elif isinstance(libraries, (list, tuple)): - libraries = list (libraries) + (self.libraries or []) + libraries = list(libraries) + (self.libraries or []) else: - raise TypeError, \ - "'libraries' (if supplied) must be a list of strings" + raise TypeError( + "'libraries' (if supplied) must be a list of strings") if library_dirs is None: library_dirs = self.library_dirs elif isinstance(library_dirs, (list, tuple)): - library_dirs = list (library_dirs) + (self.library_dirs or []) + 
library_dirs = list(library_dirs) + (self.library_dirs or []) else: - raise TypeError, \ - "'library_dirs' (if supplied) must be a list of strings" + raise TypeError( + "'library_dirs' (if supplied) must be a list of strings") if runtime_library_dirs is None: runtime_library_dirs = self.runtime_library_dirs elif isinstance(runtime_library_dirs, (list, tuple)): - runtime_library_dirs = (list (runtime_library_dirs) + + runtime_library_dirs = (list(runtime_library_dirs) + (self.runtime_library_dirs or [])) else: - raise TypeError, \ - "'runtime_library_dirs' (if supplied) " + \ - "must be a list of strings" + raise TypeError("'runtime_library_dirs' (if supplied) " + "must be a list of strings") - return (libraries, library_dirs, runtime_library_dirs) + return libraries, library_dirs, runtime_library_dirs def _need_link(self, objects, output_file): """Return true if we need to relink the files listed in 'objects' to recreate 'output_file'. """ if self.force: - return 1 + return True else: if self.dry_run: newer = newer_group(objects, output_file, missing='newer') @@ -501,7 +493,7 @@ pass def compile(self, sources, output_dir=None, macros=None, - include_dirs=None, debug=0, extra_preargs=None, + include_dirs=None, debug=False, extra_preargs=None, extra_postargs=None, depends=None): """Compile one or more source files. @@ -577,7 +569,7 @@ pass def create_static_lib(self, objects, output_libname, output_dir=None, - debug=0, target_lang=None): + debug=False, target_lang=None): """Link a bunch of stuff together to create a static library file. 
The "bunch of stuff" consists of the list of object files supplied as 'objects', the extra object files supplied to @@ -609,7 +601,7 @@ def link(self, target_desc, objects, output_filename, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, + export_symbols=None, debug=False, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None): """Link a bunch of stuff together to create an executable or shared library file. @@ -662,7 +654,7 @@ def link_shared_lib(self, objects, output_libname, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, - debug=0, extra_preargs=None, extra_postargs=None, + debug=False, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None): self.link(CCompiler.SHARED_LIBRARY, objects, self.library_filename(output_libname, lib_type='shared'), @@ -671,11 +663,10 @@ export_symbols, debug, extra_preargs, extra_postargs, build_temp, target_lang) - def link_shared_object(self, objects, output_filename, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, - debug=0, extra_preargs=None, extra_postargs=None, + debug=False, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None): self.link(CCompiler.SHARED_OBJECT, objects, output_filename, output_dir, @@ -685,8 +676,9 @@ def link_executable(self, objects, output_progname, output_dir=None, libraries=None, library_dirs=None, - runtime_library_dirs=None, debug=0, extra_preargs=None, - extra_postargs=None, target_lang=None): + runtime_library_dirs=None, debug=False, + extra_preargs=None, extra_postargs=None, + target_lang=None): self.link(CCompiler.EXECUTABLE, objects, self.executable_filename(output_progname), output_dir, libraries, library_dirs, runtime_library_dirs, None, @@ -736,8 +728,7 @@ if library_dirs is None: library_dirs = [] fd, fname = tempfile.mkstemp(".c", funcname, 
text=True) - f = os.fdopen(fd, "w") - try: + with os.fdopen(fd, "w") as f: for incl in includes: f.write("""#include "%s"\n""" % incl) f.write("""\ @@ -745,8 +736,6 @@ %s(); } """ % funcname) - finally: - f.close() try: objects = self.compile([fname], include_dirs=include_dirs) except CompileError: @@ -760,10 +749,10 @@ return False return True - def find_library_file (self, dirs, lib, debug=0): + def find_library_file(self, dirs, lib, debug=False): """Search the specified list of directories for a static or shared library file 'lib' and return the full path to that file. If - 'debug' true, look for a debugging version (if that makes sense on + 'debug' is true, look for a debugging version (if that makes sense on the current platform). Return None if 'lib' wasn't found in any of the specified directories. """ @@ -803,44 +792,45 @@ # * exe_extension - # extension for executable files, eg. '' or '.exe' - def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): + def object_filenames(self, source_filenames, strip_dir=False, output_dir=''): if output_dir is None: output_dir = '' obj_names = [] for src_name in source_filenames: base, ext = os.path.splitext(src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive + base = os.path.splitdrive(base)[1] # Chop off the drive base = base[os.path.isabs(base):] # If abs, chop off leading / if ext not in self.src_extensions: - raise UnknownFileError, \ - "unknown file type '%s' (from '%s')" % (ext, src_name) + raise UnknownFileError("unknown file type '%s' (from '%s')" % + (ext, src_name)) if strip_dir: base = os.path.basename(base) obj_names.append(os.path.join(output_dir, base + self.obj_extension)) return obj_names - def shared_object_filename(self, basename, strip_dir=0, output_dir=''): + def shared_object_filename(self, basename, strip_dir=False, output_dir=''): assert output_dir is not None if strip_dir: - basename = os.path.basename (basename) + basename = os.path.basename(basename) return 
os.path.join(output_dir, basename + self.shared_lib_extension) - def executable_filename(self, basename, strip_dir=0, output_dir=''): + def executable_filename(self, basename, strip_dir=False, output_dir=''): assert output_dir is not None if strip_dir: - basename = os.path.basename (basename) + basename = os.path.basename(basename) return os.path.join(output_dir, basename + (self.exe_extension or '')) def library_filename(self, libname, lib_type='static', # or 'shared' - strip_dir=0, output_dir=''): + strip_dir=False, output_dir=''): assert output_dir is not None if lib_type not in ("static", "shared", "dylib"): - raise ValueError, "'lib_type' must be \"static\", \"shared\" or \"dylib\"" + raise ValueError( + "'lib_type' must be 'static', 'shared' or 'dylib'") fmt = getattr(self, lib_type + "_lib_format") ext = getattr(self, lib_type + "_lib_extension") - dir, base = os.path.split (libname) + dir, base = os.path.split(libname) filename = fmt % (base, ext) if strip_dir: dir = '' @@ -850,19 +840,6 @@ # -- Utility methods ----------------------------------------------- - # TODO use logging.info - def announce(self, msg, level=None): - logger.debug(msg) - - def debug_print(self, msg): - from distutils2.debug import DEBUG - if DEBUG: - print msg - - # TODO use logging.warn - def warn(self, msg): - sys.stderr.write("warning: %s\n" % msg) - def execute(self, func, args, msg=None, level=1): execute(func, args, msg, self.dry_run) @@ -870,11 +847,12 @@ spawn(cmd, dry_run=self.dry_run) def move_file(self, src, dst): + logger.info("moving %r to %r", src, dst) if self.dry_run: - return # XXX log ? 
+ return return move(src, dst) - def mkpath(self, name, mode=0777): + def mkpath(self, name, mode=0o777): name = os.path.normpath(name) if os.path.isdir(name) or name == '': return diff --git a/distutils2/compiler/cygwinccompiler.py b/distutils2/compiler/cygwinccompiler.py --- a/distutils2/compiler/cygwinccompiler.py +++ b/distutils2/compiler/cygwinccompiler.py @@ -1,9 +1,9 @@ -"""distutils.cygwinccompiler +"""CCompiler implementations for Cygwin and mingw32 versions of GCC. -Provides the CygwinCCompiler class, a subclass of UnixCCompiler that -handles the Cygwin port of the GNU C compiler to Windows. It also contains -the Mingw32CCompiler class which handles the mingw32 port of GCC (same as -cygwin in no-cygwin mode). +This module contains the CygwinCCompiler class, a subclass of +UnixCCompiler that handles the Cygwin port of the GNU C compiler to +Windows, and the Mingw32CCompiler class which handles the mingw32 port +of GCC (same as cygwin in no-cygwin mode). """ # problems: @@ -48,15 +48,13 @@ import os import sys -import copy -import re -from warnings import warn +from distutils2 import logger from distutils2.compiler.unixccompiler import UnixCCompiler from distutils2.util import write_file -from distutils2.errors import DistutilsExecError, CompileError, UnknownFileError +from distutils2.errors import PackagingExecError, CompileError, UnknownFileError from distutils2.util import get_compiler_versions -from distutils2._backport import sysconfig +import sysconfig def get_msvcr(): @@ -94,13 +92,12 @@ shared_lib_format = "%s%s" exe_extension = ".exe" - def __init__(self, verbose=0, dry_run=0, force=0): + def __init__(self, verbose=0, dry_run=False, force=False): UnixCCompiler.__init__(self, verbose, dry_run, force) status, details = check_config_h() - self.debug_print("Python's GCC status: %s (details: %s)" % - (status, details)) + logger.debug("Python's GCC status: %s (details: %s)", status, details) if status is not CONFIG_H_OK: self.warn( "Python's pyconfig.h 
doesn't seem to support your compiler. " @@ -110,10 +107,10 @@ self.gcc_version, self.ld_version, self.dllwrap_version = \ get_compiler_versions() - self.debug_print(self.name + ": gcc %s, ld %s, dllwrap %s\n" % - (self.gcc_version, - self.ld_version, - self.dllwrap_version) ) + logger.debug(self.name + ": gcc %s, ld %s, dllwrap %s\n", + self.gcc_version, + self.ld_version, + self.dllwrap_version) # ld_version >= "2.10.90" and < "2.13" should also be able to use # gcc -mdll instead of dllwrap @@ -154,29 +151,29 @@ self.dll_libraries = get_msvcr() def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): - """Compiles the source by spawing GCC and windres if needed.""" + """Compile the source by spawning GCC and windres if needed.""" if ext == '.rc' or ext == '.res': # gcc needs '.res' and '.rc' compiled to object files !!! try: self.spawn(["windres", "-i", src, "-o", obj]) - except DistutilsExecError, msg: - raise CompileError, msg + except PackagingExecError: + raise CompileError(sys.exc_info()[1]) else: # for other files use the C-compiler try: self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + extra_postargs) - except DistutilsExecError, msg: - raise CompileError, msg + except PackagingExecError: + raise CompileError(sys.exc_info()[1]) def link(self, target_desc, objects, output_filename, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, + export_symbols=None, debug=False, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None): """Link the objects.""" # use separate copies, so we can modify the lists - extra_preargs = copy.copy(extra_preargs or []) - libraries = copy.copy(libraries or []) - objects = copy.copy(objects or []) + extra_preargs = list(extra_preargs or []) + libraries = list(libraries or []) + objects = list(objects or []) # Additional libraries libraries.extend(self.dll_libraries) @@ -195,7 +192,7 @@ # where are the object files 
temp_dir = os.path.dirname(objects[0]) # name of dll to give the helper files the same base name - (dll_name, dll_extension) = os.path.splitext( + dll_name, dll_extension = os.path.splitext( os.path.basename(output_filename)) # generate the filenames for these files @@ -215,13 +212,13 @@ # dllwrap uses different options than gcc/ld if self.linker_dll == "dllwrap": - extra_preargs.extend(["--output-lib", lib_file]) + extra_preargs.extend(("--output-lib", lib_file)) # for dllwrap we have to use a special option - extra_preargs.extend(["--def", def_file]) + extra_preargs.extend(("--def", def_file)) # we use gcc/ld here and can be sure ld is >= 2.9.10 else: # doesn't work: bfd_close build\...\libfoo.a: Invalid operation - #extra_preargs.extend(["-Wl,--out-implib,%s" % lib_file]) + #extra_preargs.extend(("-Wl,--out-implib,%s" % lib_file)) # for gcc/ld the def-file is specified as any object files objects.append(def_file) @@ -246,7 +243,8 @@ # -- Miscellaneous methods ----------------------------------------- - def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): + def object_filenames(self, source_filenames, strip_dir=False, + output_dir=''): """Adds supports for rc and res files.""" if output_dir is None: output_dir = '' @@ -255,8 +253,7 @@ # use normcase to make sure '.rc' is really '.rc' and not '.RC' base, ext = os.path.splitext(os.path.normcase(src_name)) if ext not in (self.src_extensions + ['.rc','.res']): - raise UnknownFileError, \ - "unknown file type '%s' (from '%s')" % (ext, src_name) + raise UnknownFileError("unknown file type '%s' (from '%s')" % (ext, src_name)) if strip_dir: base = os.path.basename (base) if ext in ('.res', '.rc'): @@ -275,7 +272,7 @@ name = 'mingw32' description = 'MinGW32 compiler' - def __init__(self, verbose=0, dry_run=0, force=0): + def __init__(self, verbose=0, dry_run=False, force=False): CygwinCCompiler.__init__ (self, verbose, dry_run, force) @@ -347,14 +344,12 @@ # let's see if __GNUC__ is mentioned in 
python.h fn = sysconfig.get_config_h_filename() try: - config_h = open(fn) - try: + with open(fn) as config_h: if "__GNUC__" in config_h.read(): return CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn else: return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn - finally: - config_h.close() - except IOError, exc: + except IOError: + exc = sys.exc_info()[1] return (CONFIG_H_UNCERTAIN, "couldn't read '%s': %s" % (fn, exc.strerror)) diff --git a/distutils2/compiler/extension.py b/distutils2/compiler/extension.py --- a/distutils2/compiler/extension.py +++ b/distutils2/compiler/extension.py @@ -1,10 +1,6 @@ -"""distutils.extension +"""Class representing C/C++ extension modules.""" -Provides the Extension class, used to describe C/C++ extension -modules in setup scripts.""" - - -import warnings +from distutils2 import logger # This class is really only used by the "build_ext" command, so it might # make sense to put it in distutils.command.build_ext. However, that @@ -82,33 +78,22 @@ build process, but simply not install the failing extension. """ - # When adding arguments to this constructor, be sure to update - # setup_keywords in core.py. 
- def __init__(self, name, sources, - include_dirs=None, - define_macros=None, - undef_macros=None, - library_dirs=None, - libraries=None, - runtime_library_dirs=None, - extra_objects=None, - extra_compile_args=None, - extra_link_args=None, - export_symbols=None, - swig_opts=None, - depends=None, - language=None, - optional=None, - **kw # To catch unknown keywords - ): - if not isinstance(name, str): + # **kwargs are allowed so that a warning is emitted instead of an + # exception + def __init__(self, name, sources, include_dirs=None, define_macros=None, + undef_macros=None, library_dirs=None, libraries=None, + runtime_library_dirs=None, extra_objects=None, + extra_compile_args=None, extra_link_args=None, + export_symbols=None, swig_opts=None, depends=None, + language=None, optional=None, **kw): + if not isinstance(name, basestring): raise AssertionError("'name' must be a string") if not isinstance(sources, list): raise AssertionError("'sources' must be a list of strings") for v in sources: - if not isinstance(v, str): + if not isinstance(v, basestring): raise AssertionError("'sources' must be a list of strings") self.name = name @@ -132,5 +117,5 @@ if len(kw) > 0: options = [repr(option) for option in kw] options = ', '.join(sorted(options)) - msg = "Unknown Extension options: %s" % options - warnings.warn(msg) + logger.warning( + 'unknown arguments given to Extension: %s', options) diff --git a/distutils2/compiler/msvc9compiler.py b/distutils2/compiler/msvc9compiler.py --- a/distutils2/compiler/msvc9compiler.py +++ b/distutils2/compiler/msvc9compiler.py @@ -1,10 +1,7 @@ -"""distutils.msvc9compiler +"""CCompiler implementation for the Microsoft Visual Studio 2008 compiler. -Contains MSVCCompiler, an implementation of the abstract CCompiler class -for the Microsoft Visual Studio 2008. - -The module is compatible with VS 2005 and VS 2008. You can find legacy support -for older versions of VS in distutils.msvccompiler. 
+The MSVCCompiler class is compatible with VS 2005 and VS 2008. Legacy +support for older versions of VS are in the msvccompiler module. """ # Written by Perry Stoll @@ -16,24 +13,24 @@ import sys import re -from distutils2.errors import (DistutilsExecError, DistutilsPlatformError, - CompileError, LibError, LinkError) +from distutils2.errors import (PackagingExecError, PackagingPlatformError, + CompileError, LibError, LinkError) from distutils2.compiler.ccompiler import CCompiler from distutils2.compiler import gen_lib_options from distutils2 import logger from distutils2.util import get_platform -import _winreg +import winreg -RegOpenKeyEx = _winreg.OpenKeyEx -RegEnumKey = _winreg.EnumKey -RegEnumValue = _winreg.EnumValue -RegError = _winreg.error +RegOpenKeyEx = winreg.OpenKeyEx +RegEnumKey = winreg.EnumKey +RegEnumValue = winreg.EnumValue +RegError = winreg.error -HKEYS = (_winreg.HKEY_USERS, - _winreg.HKEY_CURRENT_USER, - _winreg.HKEY_LOCAL_MACHINE, - _winreg.HKEY_CLASSES_ROOT) +HKEYS = (winreg.HKEY_USERS, + winreg.HKEY_CURRENT_USER, + winreg.HKEY_LOCAL_MACHINE, + winreg.HKEY_CLASSES_ROOT) VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f" WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows" @@ -132,11 +129,11 @@ else: raise KeyError("sdkinstallrootv2.0") except KeyError: - raise DistutilsPlatformError( - """Python was built with Visual Studio 2008; -extensions must be built with a compiler than can generate compatible binaries. -Visual Studio 2008 was not found on this system. If you have Cygwin installed, -you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""") + raise PackagingPlatformError( +"""Python was built with Visual Studio 2008; extensions must be built with a +compiler than can generate compatible binaries. Visual Studio 2008 was not +found on this system. 
If you have Cygwin installed, you can try compiling +with MingW32, by passing "-c mingw32" to pysetup.""") if version >= 9.0: self.set_macro("FrameworkVersion", self.vsbase, "clr version") @@ -153,7 +150,7 @@ self.macros["$(FrameworkVersion)"] = d["version"] def sub(self, s): - for k, v in self.macros.iteritems(): + for k, v in self.macros.items(): s = s.replace(k, v) return s @@ -247,7 +244,7 @@ result = {} if vcvarsall is None: - raise DistutilsPlatformError("Unable to find vcvarsall.bat") + raise PackagingPlatformError("Unable to find vcvarsall.bat") logger.debug("calling 'vcvarsall.bat %s' (version=%s)", arch, version) popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch), stdout=subprocess.PIPE, @@ -255,7 +252,7 @@ stdout, stderr = popen.communicate() if popen.wait() != 0: - raise DistutilsPlatformError(stderr.decode("mbcs")) + raise PackagingPlatformError(stderr.decode("mbcs")) stdout = stdout.decode("mbcs") for line in stdout.split("\n"): @@ -278,7 +275,7 @@ # More globals VERSION = get_build_version() if VERSION < 8.0: - raise DistutilsPlatformError("VC %0.1f is not supported by this module" % VERSION) + raise PackagingPlatformError("VC %0.1f is not supported by this module" % VERSION) # MACROS = MacroExpander(VERSION) class MSVCCompiler(CCompiler) : @@ -312,8 +309,8 @@ static_lib_format = shared_lib_format = '%s%s' exe_extension = '.exe' - def __init__(self, verbose=0, dry_run=0, force=0): - CCompiler.__init__ (self, verbose, dry_run, force) + def __init__(self, verbose=0, dry_run=False, force=False): + CCompiler.__init__(self, verbose, dry_run, force) self.__version = VERSION self.__root = r"Software\Microsoft\VisualStudio" # self.__macros = MACROS @@ -331,7 +328,7 @@ # sanity check for platforms to prevent obscure errors later. 
ok_plats = 'win32', 'win-amd64', 'win-ia64' if plat_name not in ok_plats: - raise DistutilsPlatformError("--plat-name must be one of %s" % + raise PackagingPlatformError("--plat-name must be one of %s" % (ok_plats,)) if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"): @@ -359,12 +356,12 @@ vc_env = query_vcvarsall(VERSION, plat_spec) # take care to only use strings in the environment. - self.__paths = vc_env['path'].encode('mbcs').split(os.pathsep) - os.environ['lib'] = vc_env['lib'].encode('mbcs') - os.environ['include'] = vc_env['include'].encode('mbcs') + self.__paths = vc_env['path'].split(os.pathsep) + os.environ['lib'] = vc_env['lib'] + os.environ['include'] = vc_env['include'] if len(self.__paths) == 0: - raise DistutilsPlatformError("Python was built with %s, " + raise PackagingPlatformError("Python was built with %s, " "and extensions need to be built with the same " "version of the compiler, but it isn't installed." % self.__product) @@ -412,37 +409,37 @@ def object_filenames(self, source_filenames, - strip_dir=0, + strip_dir=False, output_dir=''): # Copied from ccompiler.py, extended to return .res as 'object'-file # for .rc input file if output_dir is None: output_dir = '' obj_names = [] for src_name in source_filenames: - (base, ext) = os.path.splitext (src_name) + base, ext = os.path.splitext(src_name) base = os.path.splitdrive(base)[1] # Chop off the drive base = base[os.path.isabs(base):] # If abs, chop off leading / if ext not in self.src_extensions: # Better to raise an exception instead of silently continuing # and later complain about sources and targets having # different lengths - raise CompileError ("Don't know how to compile %s" % src_name) + raise CompileError("Don't know how to compile %s" % src_name) if strip_dir: - base = os.path.basename (base) + base = os.path.basename(base) if ext in self._rc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) + 
obj_names.append(os.path.join(output_dir, + base + self.res_extension)) elif ext in self._mc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) + obj_names.append(os.path.join(output_dir, + base + self.res_extension)) else: - obj_names.append (os.path.join (output_dir, - base + self.obj_extension)) + obj_names.append(os.path.join(output_dir, + base + self.obj_extension)) return obj_names def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, + output_dir=None, macros=None, include_dirs=None, debug=False, extra_preargs=None, extra_postargs=None, depends=None): if not self.initialized: @@ -452,7 +449,7 @@ macros, objects, extra_postargs, pp_opts, build = compile_info compile_opts = extra_preargs or [] - compile_opts.append ('/c') + compile_opts.append('/c') if debug: compile_opts.extend(self.compile_options_debug) else: @@ -480,8 +477,8 @@ try: self.spawn([self.rc] + pp_opts + [output_opt] + [input_opt]) - except DistutilsExecError, msg: - raise CompileError(msg) + except PackagingExecError: + raise CompileError(sys.exc_info()[1]) continue elif ext in self._mc_extensions: # Compile .MC to .RC file to .RES file. @@ -501,14 +498,14 @@ # first compile .MC to .RC and .H file self.spawn([self.mc] + ['-h', h_dir, '-r', rc_dir] + [src]) - base, _ = os.path.splitext (os.path.basename (src)) - rc_file = os.path.join (rc_dir, base + '.rc') + base, _ = os.path.splitext(os.path.basename(src)) + rc_file = os.path.join(rc_dir, base + '.rc') # then compile .RC to .RES file self.spawn([self.rc] + ["/fo" + obj] + [rc_file]) - except DistutilsExecError, msg: - raise CompileError(msg) + except PackagingExecError: + raise CompileError(sys.exc_info()[1]) continue else: # how to handle this file? 
@@ -520,8 +517,8 @@ self.spawn([self.cc] + compile_opts + pp_opts + [input_opt, output_opt] + extra_postargs) - except DistutilsExecError, msg: - raise CompileError(msg) + except PackagingExecError: + raise CompileError(sys.exc_info()[1]) return objects @@ -530,12 +527,12 @@ objects, output_libname, output_dir=None, - debug=0, + debug=False, target_lang=None): if not self.initialized: self.initialize() - (objects, output_dir) = self._fix_object_args(objects, output_dir) + objects, output_dir = self._fix_object_args(objects, output_dir) output_filename = self.library_filename(output_libname, output_dir=output_dir) @@ -545,37 +542,26 @@ pass # XXX what goes here? try: self.spawn([self.lib] + lib_args) - except DistutilsExecError, msg: - raise LibError(msg) + except PackagingExecError: + raise LibError(sys.exc_info()[1]) else: logger.debug("skipping %s (up-to-date)", output_filename) - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - + def link(self, target_desc, objects, output_filename, output_dir=None, + libraries=None, library_dirs=None, runtime_library_dirs=None, + export_symbols=None, debug=False, extra_preargs=None, + extra_postargs=None, build_temp=None, target_lang=None): if not self.initialized: self.initialize() - (objects, output_dir) = self._fix_object_args(objects, output_dir) + objects, output_dir = self._fix_object_args(objects, output_dir) fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) - (libraries, library_dirs, runtime_library_dirs) = fixed_args + libraries, library_dirs, runtime_library_dirs = fixed_args if runtime_library_dirs: - self.warn ("I don't know what to do with 'runtime_library_dirs': " - + str (runtime_library_dirs)) + self.warn("don't know what to do with 'runtime_library_dirs': " + + 
str(runtime_library_dirs)) lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, @@ -609,12 +595,12 @@ # builds, they can go into the same directory. build_temp = os.path.dirname(objects[0]) if export_symbols is not None: - (dll_name, dll_ext) = os.path.splitext( + dll_name, dll_ext = os.path.splitext( os.path.basename(output_filename)) implib_file = os.path.join( build_temp, self.library_filename(dll_name)) - ld_args.append ('/IMPLIB:' + implib_file) + ld_args.append('/IMPLIB:' + implib_file) # Embedded manifests are recommended - see MSDN article titled # "How to: Embed a Manifest Inside a C/C++ Application" @@ -634,8 +620,8 @@ self.mkpath(os.path.dirname(output_filename)) try: self.spawn([self.linker] + ld_args) - except DistutilsExecError, msg: - raise LinkError(msg) + except PackagingExecError: + raise LinkError(sys.exc_info()[1]) # embed the manifest # XXX - this is somewhat fragile - if mt.exe fails, distutils @@ -651,8 +637,8 @@ try: self.spawn(['mt.exe', '-nologo', '-manifest', temp_manifest, out_arg]) - except DistutilsExecError, msg: - raise LinkError(msg) + except PackagingExecError: + raise LinkError(sys.exc_info()[1]) else: logger.debug("skipping %s (up-to-date)", output_filename) @@ -664,11 +650,8 @@ # runtimes are not in WinSxS folder, but in Python's own # folder), the runtimes do not need to be in every folder # with .pyd's. 
- manifest_f = open(manifest_file) - try: + with open(manifest_file) as manifest_f: manifest_buf = manifest_f.read() - finally: - manifest_f.close() pattern = re.compile( r"""<assemblyIdentity.*?name=("|')Microsoft\.""" r"""VC\d{2}\.CRT("|').*?(/>|</assemblyIdentity>)""", @@ -676,11 +659,8 @@ manifest_buf = re.sub(pattern, "", manifest_buf) pattern = "<dependentAssembly>\s*</dependentAssembly>" manifest_buf = re.sub(pattern, "", manifest_buf) - manifest_f = open(manifest_file, 'w') - try: + with open(manifest_file, 'w') as manifest_f: manifest_f.write(manifest_buf) - finally: - manifest_f.close() except IOError: pass @@ -692,14 +672,14 @@ return "/LIBPATH:" + dir def runtime_library_dir_option(self, dir): - raise DistutilsPlatformError( + raise PackagingPlatformError( "don't know how to set runtime library search path for MSVC++") def library_option(self, lib): return self.library_filename(lib) - def find_library_file(self, dirs, lib, debug=0): + def find_library_file(self, dirs, lib, debug=False): # Prefer a debugging library if found (and requested), but deal # with it if we don't have one. if debug: @@ -708,7 +688,7 @@ try_names = [lib] for dir in dirs: for name in try_names: - libfile = os.path.join(dir, self.library_filename (name)) + libfile = os.path.join(dir, self.library_filename(name)) if os.path.exists(libfile): return libfile else: diff --git a/distutils2/compiler/msvccompiler.py b/distutils2/compiler/msvccompiler.py --- a/distutils2/compiler/msvccompiler.py +++ b/distutils2/compiler/msvccompiler.py @@ -1,7 +1,6 @@ -"""distutils.msvccompiler +"""CCompiler implementation for old Microsoft Visual Studio compilers. -Contains MSVCCompiler, an implementation of the abstract CCompiler class -for the Microsoft Visual Studio. +For a compiler compatible with VS 2005 and 2008, use msvc9compiler. 
""" # Written by Perry Stoll @@ -11,31 +10,30 @@ import sys import os -import string -from distutils2.errors import (DistutilsExecError, DistutilsPlatformError, - CompileError, LibError, LinkError) +from distutils2.errors import (PackagingExecError, PackagingPlatformError, + CompileError, LibError, LinkError) from distutils2.compiler.ccompiler import CCompiler from distutils2.compiler import gen_lib_options from distutils2 import logger -_can_read_reg = 0 +_can_read_reg = False try: - import _winreg + import winreg - _can_read_reg = 1 - hkey_mod = _winreg + _can_read_reg = True + hkey_mod = winreg - RegOpenKeyEx = _winreg.OpenKeyEx - RegEnumKey = _winreg.EnumKey - RegEnumValue = _winreg.EnumValue - RegError = _winreg.error + RegOpenKeyEx = winreg.OpenKeyEx + RegEnumKey = winreg.EnumKey + RegEnumValue = winreg.EnumValue + RegError = winreg.error except ImportError: try: import win32api import win32con - _can_read_reg = 1 + _can_read_reg = True hkey_mod = win32con RegOpenKeyEx = win32api.RegOpenKeyEx @@ -55,6 +53,7 @@ hkey_mod.HKEY_LOCAL_MACHINE, hkey_mod.HKEY_CLASSES_ROOT) + def read_keys(base, key): """Return list of registry keys.""" @@ -64,7 +63,7 @@ return None L = [] i = 0 - while 1: + while True: try: k = RegEnumKey(handle, i) except RegError: @@ -73,6 +72,7 @@ i = i + 1 return L + def read_values(base, key): """Return dict of registry keys and values. @@ -84,7 +84,7 @@ return None d = {} i = 0 - while 1: + while True: try: name, value, type = RegEnumValue(handle, i) except RegError: @@ -94,6 +94,7 @@ i = i + 1 return d + def convert_mbcs(s): enc = getattr(s, "encode", None) if enc is not None: @@ -103,6 +104,7 @@ pass return s + class MacroExpander(object): def __init__(self, version): @@ -128,11 +130,11 @@ else: self.set_macro("FrameworkSDKDir", net, "sdkinstallroot") except KeyError: - raise DistutilsPlatformError, \ - ("""Python was built with Visual Studio 2003; -extensions must be built with a compiler than can generate compatible binaries. 
-Visual Studio 2003 was not found on this system. If you have Cygwin installed, -you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""") + raise PackagingPlatformError( +"""Python was built with Visual Studio 2003; extensions must be built with +a compiler than can generate compatible binaries. Visual Studio 2003 was +not found on this system. If you have Cygwin installed, you can try +compiling with MingW32, by passing "-c mingw32" to pysetup.""") p = r"Software\Microsoft\NET Framework Setup\Product" for base in HKEYS: @@ -145,10 +147,11 @@ self.macros["$(FrameworkVersion)"] = d["version"] def sub(self, s): - for k, v in self.macros.iteritems(): - s = string.replace(s, k, v) + for k, v in self.macros.items(): + s = s.replace(k, v) return s + def get_build_version(): """Return the version of MSVC that was used to build Python. @@ -157,7 +160,7 @@ """ prefix = "MSC v." - i = string.find(sys.version, prefix) + i = sys.version.find(prefix) if i == -1: return 6 i = i + len(prefix) @@ -172,6 +175,7 @@ # else we don't know what version of the compiler this is return None + def get_build_architecture(): """Return the processor architecture. @@ -179,12 +183,13 @@ """ prefix = " bit (" - i = string.find(sys.version, prefix) + i = sys.version.find(prefix) if i == -1: return "Intel" - j = string.find(sys.version, ")", i) + j = sys.version.find(")", i) return sys.version[i+len(prefix):j] + def normalize_and_reduce_paths(paths): """Return a list of normalized paths with duplicates removed. 
@@ -200,7 +205,7 @@ return reduced_paths -class MSVCCompiler (CCompiler) : +class MSVCCompiler(CCompiler): """Concrete class that implements an interface to Microsoft Visual C++, as defined by the CCompiler abstract class.""" @@ -231,8 +236,8 @@ static_lib_format = shared_lib_format = '%s%s' exe_extension = '.exe' - def __init__ (self, verbose=0, dry_run=0, force=0): - CCompiler.__init__ (self, verbose, dry_run, force) + def __init__(self, verbose=0, dry_run=False, force=False): + CCompiler.__init__(self, verbose, dry_run, force) self.__version = get_build_version() self.__arch = get_build_architecture() if self.__arch == "Intel": @@ -251,7 +256,8 @@ def initialize(self): self.__paths = [] - if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"): + if ("DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and + self.find_exe("cl.exe")): # Assume that the SDK set up everything alright; don't try to be # smarter self.cc = "cl.exe" @@ -262,11 +268,11 @@ else: self.__paths = self.get_msvc_paths("path") - if len (self.__paths) == 0: - raise DistutilsPlatformError, \ - ("Python was built with %s, " - "and extensions need to be built with the same " - "version of the compiler, but it isn't installed." % self.__product) + if len(self.__paths) == 0: + raise PackagingPlatformError("Python was built with %s " + "and extensions need to be built with the same " + "version of the compiler, but it isn't installed." 
% + self.__product) self.cc = self.find_exe("cl.exe") self.linker = self.find_exe("link.exe") @@ -278,23 +284,23 @@ # extend the MSVC path with the current path try: - for p in string.split(os.environ['path'], ';'): + for p in os.environ['path'].split(';'): self.__paths.append(p) except KeyError: pass self.__paths = normalize_and_reduce_paths(self.__paths) - os.environ['path'] = string.join(self.__paths, ';') + os.environ['path'] = ';'.join(self.__paths) self.preprocess_options = None if self.__arch == "Intel": - self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GX' , - '/DNDEBUG'] + self.compile_options = ['/nologo', '/Ox', '/MD', '/W3', '/GX', + '/DNDEBUG'] self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX', '/Z7', '/D_DEBUG'] else: # Win64 - self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GS-' , - '/DNDEBUG'] + self.compile_options = ['/nologo', '/Ox', '/MD', '/W3', '/GS-', + '/DNDEBUG'] self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-', '/Z7', '/D_DEBUG'] @@ -313,50 +319,46 @@ # -- Worker methods ------------------------------------------------ - def object_filenames (self, - source_filenames, - strip_dir=0, - output_dir=''): + def object_filenames(self, source_filenames, strip_dir=False, output_dir=''): # Copied from ccompiler.py, extended to return .res as 'object'-file # for .rc input file - if output_dir is None: output_dir = '' + if output_dir is None: + output_dir = '' obj_names = [] for src_name in source_filenames: - (base, ext) = os.path.splitext (src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive + base, ext = os.path.splitext(src_name) + base = os.path.splitdrive(base)[1] # Chop off the drive base = base[os.path.isabs(base):] # If abs, chop off leading / if ext not in self.src_extensions: # Better to raise an exception instead of silently continuing # and later complain about sources and targets having # different lengths - raise CompileError ("Don't know how to compile %s" % 
src_name) + raise CompileError("Don't know how to compile %s" % src_name) if strip_dir: - base = os.path.basename (base) + base = os.path.basename(base) if ext in self._rc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) + obj_names.append(os.path.join(output_dir, + base + self.res_extension)) elif ext in self._mc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) + obj_names.append(os.path.join(output_dir, + base + self.res_extension)) else: - obj_names.append (os.path.join (output_dir, - base + self.obj_extension)) + obj_names.append(os.path.join(output_dir, + base + self.obj_extension)) return obj_names - # object_filenames () - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, + output_dir=None, macros=None, include_dirs=None, debug=False, extra_preargs=None, extra_postargs=None, depends=None): - if not self.initialized: self.initialize() + if not self.initialized: + self.initialize() macros, objects, extra_postargs, pp_opts, build = \ self._setup_compile(output_dir, macros, include_dirs, sources, depends, extra_postargs) compile_opts = extra_preargs or [] - compile_opts.append ('/c') + compile_opts.append('/c') if debug: compile_opts.extend(self.compile_options_debug) else: @@ -382,10 +384,10 @@ input_opt = src output_opt = "/fo" + obj try: - self.spawn ([self.rc] + pp_opts + - [output_opt] + [input_opt]) - except DistutilsExecError, msg: - raise CompileError, msg + self.spawn([self.rc] + pp_opts + + [output_opt] + [input_opt]) + except PackagingExecError: + raise CompileError(sys.exc_info()[1]) continue elif ext in self._mc_extensions: @@ -401,97 +403,78 @@ # the build directory for the RC file and message # resources. This works at least for win32all. 
- h_dir = os.path.dirname (src) - rc_dir = os.path.dirname (obj) + h_dir = os.path.dirname(src) + rc_dir = os.path.dirname(obj) try: # first compile .MC to .RC and .H file - self.spawn ([self.mc] + - ['-h', h_dir, '-r', rc_dir] + [src]) - base, _ = os.path.splitext (os.path.basename (src)) - rc_file = os.path.join (rc_dir, base + '.rc') + self.spawn([self.mc] + + ['-h', h_dir, '-r', rc_dir] + [src]) + base, _ = os.path.splitext(os.path.basename(src)) + rc_file = os.path.join(rc_dir, base + '.rc') # then compile .RC to .RES file - self.spawn ([self.rc] + + self.spawn([self.rc] + ["/fo" + obj] + [rc_file]) - except DistutilsExecError, msg: - raise CompileError, msg + except PackagingExecError: + raise CompileError(sys.exc_info()[1]) continue else: # how to handle this file? - raise CompileError ( - "Don't know how to compile %s to %s" % \ + raise CompileError( + "Don't know how to compile %s to %s" % (src, obj)) output_opt = "/Fo" + obj try: - self.spawn ([self.cc] + compile_opts + pp_opts + - [input_opt, output_opt] + - extra_postargs) - except DistutilsExecError, msg: - raise CompileError, msg + self.spawn([self.cc] + compile_opts + pp_opts + + [input_opt, output_opt] + + extra_postargs) + except PackagingExecError: + raise CompileError(sys.exc_info()[1]) return objects - # compile () + def create_static_lib(self, objects, output_libname, output_dir=None, + debug=False, target_lang=None): + if not self.initialized: + self.initialize() + objects, output_dir = self._fix_object_args(objects, output_dir) + output_filename = \ + self.library_filename(output_libname, output_dir=output_dir) - - def create_static_lib (self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): - - if not self.initialized: self.initialize() - (objects, output_dir) = self._fix_object_args (objects, output_dir) - output_filename = \ - self.library_filename (output_libname, output_dir=output_dir) - - if self._need_link (objects, output_filename): + if 
self._need_link(objects, output_filename): lib_args = objects + ['/OUT:' + output_filename] if debug: pass # XXX what goes here? try: - self.spawn ([self.lib] + lib_args) - except DistutilsExecError, msg: - raise LibError, msg + self.spawn([self.lib] + lib_args) + except PackagingExecError: + raise LibError(sys.exc_info()[1]) else: logger.debug("skipping %s (up-to-date)", output_filename) - # create_static_lib () + def link(self, target_desc, objects, output_filename, output_dir=None, + libraries=None, library_dirs=None, runtime_library_dirs=None, + export_symbols=None, debug=False, extra_preargs=None, + extra_postargs=None, build_temp=None, target_lang=None): - def link (self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - - if not self.initialized: self.initialize() - (objects, output_dir) = self._fix_object_args (objects, output_dir) - (libraries, library_dirs, runtime_library_dirs) = \ - self._fix_lib_args (libraries, library_dirs, runtime_library_dirs) + if not self.initialized: + self.initialize() + objects, output_dir = self._fix_object_args(objects, output_dir) + libraries, library_dirs, runtime_library_dirs = \ + self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) if runtime_library_dirs: - self.warn ("I don't know what to do with 'runtime_library_dirs': " - + str (runtime_library_dirs)) + self.warn("don't know what to do with 'runtime_library_dirs': %s" + % (runtime_library_dirs,)) - lib_opts = gen_lib_options (self, - library_dirs, runtime_library_dirs, - libraries) + lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, + libraries) if output_dir is not None: - output_filename = os.path.join (output_dir, output_filename) + output_filename = os.path.join(output_dir, output_filename) - if self._need_link (objects, 
output_filename): + if self._need_link(objects, output_filename): if target_desc == CCompiler.EXECUTABLE: if debug: @@ -517,46 +500,41 @@ # directory. Since they have different names for debug and release # builds, they can go into the same directory. if export_symbols is not None: - (dll_name, dll_ext) = os.path.splitext( + dll_name, dll_ext = os.path.splitext( os.path.basename(output_filename)) implib_file = os.path.join( os.path.dirname(objects[0]), self.library_filename(dll_name)) - ld_args.append ('/IMPLIB:' + implib_file) + ld_args.append('/IMPLIB:' + implib_file) if extra_preargs: ld_args[:0] = extra_preargs if extra_postargs: ld_args.extend(extra_postargs) - self.mkpath (os.path.dirname (output_filename)) + self.mkpath(os.path.dirname(output_filename)) try: - self.spawn ([self.linker] + ld_args) - except DistutilsExecError, msg: - raise LinkError, msg + self.spawn([self.linker] + ld_args) + except PackagingExecError: + raise LinkError(sys.exc_info()[1]) else: logger.debug("skipping %s (up-to-date)", output_filename) - # link () - - # -- Miscellaneous methods ----------------------------------------- # These are all used by the 'gen_lib_options() function, in # ccompiler.py. - def library_dir_option (self, dir): + def library_dir_option(self, dir): return "/LIBPATH:" + dir - def runtime_library_dir_option (self, dir): - raise DistutilsPlatformError, \ - "don't know how to set runtime library search path for MSVC++" + def runtime_library_dir_option(self, dir): + raise PackagingPlatformError("don't know how to set runtime library search path for MSVC++") - def library_option (self, lib): - return self.library_filename (lib) + def library_option(self, lib): + return self.library_filename(lib) - - def find_library_file (self, dirs, lib, debug=0): + def find_library_file(self, dirs, lib, debug=False): # Prefer a debugging library if found (and requested), but deal # with it if we don't have one. 
if debug: @@ -565,15 +543,13 @@ try_names = [lib] for dir in dirs: for name in try_names: - libfile = os.path.join(dir, self.library_filename (name)) + libfile = os.path.join(dir, self.library_filename(name)) if os.path.exists(libfile): return libfile else: # Oops, didn't find it in *any* of 'dirs' return None - # find_library_file () - # Helper methods for using the MSVC registry settings def find_exe(self, exe): @@ -592,8 +568,8 @@ return fn # didn't find it; try existing path - for p in string.split(os.environ['Path'],';'): - fn = os.path.join(os.path.abspath(p),exe) + for p in os.environ['Path'].split(';'): + fn = os.path.join(os.path.abspath(p), exe) if os.path.isfile(fn): return fn @@ -621,9 +597,9 @@ d = read_values(base, key) if d: if self.__version >= 7: - return string.split(self.__macros.sub(d[path]), ";") + return self.__macros.sub(d[path]).split(";") else: - return string.split(d[path], ";") + return d[path].split(";") # MSVC 6 seems to create the registry entries we need only when # the GUI is run. if self.__version == 6: @@ -648,7 +624,7 @@ else: p = self.get_msvc_paths(name) if p: - os.environ[name] = string.join(p, ';') + os.environ[name] = ';'.join(p) if get_build_version() >= 8.0: diff --git a/distutils2/compiler/unixccompiler.py b/distutils2/compiler/unixccompiler.py --- a/distutils2/compiler/unixccompiler.py +++ b/distutils2/compiler/unixccompiler.py @@ -1,7 +1,7 @@ -"""distutils.unixccompiler +"""CCompiler implementation for Unix compilers. 
-Contains the UnixCCompiler class, a subclass of CCompiler that handles -the "typical" Unix-style command-line C compiler: +This module contains the UnixCCompiler class, a subclass of CCompiler +that handles the "typical" Unix-style command-line C compiler: * macros defined with -Dname[=value] * macros undefined with -Uname * include search directories specified with -Idir @@ -13,16 +13,15 @@ * link shared library handled by 'cc -shared' """ - import os, sys from distutils2.util import newer from distutils2.compiler.ccompiler import CCompiler from distutils2.compiler import gen_preprocess_options, gen_lib_options -from distutils2.errors import (DistutilsExecError, CompileError, +from distutils2.errors import (PackagingExecError, CompileError, LibError, LinkError) from distutils2 import logger -from distutils2._backport import sysconfig +import sysconfig # XXX Things not currently handled: @@ -34,7 +33,7 @@ # we need some way for outsiders to feed preprocessor/compiler/linker # flags in to us -- eg. a sysadmin might want to mandate certain flags # via a site config file, or a user might want to set something for -# compiling this module distribution only via the setup.py command +# compiling this module distribution only via the pysetup command # line, whatever. As long as these options come from something on the # current system, they can be as system-dependent as they like, and we # should just happily stuff them into the preprocessor/compiler/linker @@ -49,7 +48,7 @@ build, without a way to remove an architecture. Furthermore GCC will barf if multiple '-isysroot' arguments are present. 
""" - stripArch = stripSysroot = 0 + stripArch = stripSysroot = False compiler_so = list(compiler_so) kernel_version = os.uname()[2] # 8.4.3 @@ -64,7 +63,7 @@ stripSysroot = '-isysroot' in cc_args if stripArch or 'ARCHFLAGS' in os.environ: - while 1: + while True: try: index = compiler_so.index('-arch') # Strip this argument and the next one: @@ -150,7 +149,7 @@ pp_opts = gen_preprocess_options(macros, include_dirs) pp_args = self.preprocessor + pp_opts if output_file: - pp_args.extend(['-o', output_file]) + pp_args.extend(('-o', output_file)) if extra_preargs: pp_args[:0] = extra_preargs if extra_postargs: @@ -166,8 +165,8 @@ self.mkpath(os.path.dirname(output_file)) try: self.spawn(pp_args) - except DistutilsExecError, msg: - raise CompileError, msg + except PackagingExecError: + raise CompileError(sys.exc_info()[1]) def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): compiler_so = self.compiler_so @@ -176,11 +175,11 @@ try: self.spawn(compiler_so + cc_args + [src, '-o', obj] + extra_postargs) - except DistutilsExecError, msg: - raise CompileError, msg + except PackagingExecError: + raise CompileError(sys.exc_info()[1]) def create_static_lib(self, objects, output_libname, - output_dir=None, debug=0, target_lang=None): + output_dir=None, debug=False, target_lang=None): objects, output_dir = self._fix_object_args(objects, output_dir) output_filename = \ @@ -200,15 +199,15 @@ if self.ranlib: try: self.spawn(self.ranlib + [output_filename]) - except DistutilsExecError, msg: - raise LibError, msg + except PackagingExecError: + raise LibError(sys.exc_info()[1]) else: logger.debug("skipping %s (up-to-date)", output_filename) def link(self, target_desc, objects, output_filename, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, + export_symbols=None, debug=False, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None): objects, output_dir = 
self._fix_object_args(objects, output_dir) libraries, library_dirs, runtime_library_dirs = \ @@ -216,8 +215,8 @@ lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries) - if not isinstance(output_dir, (str, type(None))): - raise TypeError, "'output_dir' must be a string or None" + if type(output_dir) not in (str, type(None)): + raise TypeError("'output_dir' must be a string or None") if output_dir is not None: output_filename = os.path.join(output_dir, output_filename) @@ -254,8 +253,8 @@ linker = _darwin_compiler_fixup(linker, ld_args) self.spawn(linker + ld_args) - except DistutilsExecError, msg: - raise LinkError, msg + except PackagingExecError: + raise LinkError(sys.exc_info()[1]) else: logger.debug("skipping %s (up-to-date)", output_filename) @@ -316,7 +315,7 @@ def library_option(self, lib): return "-l" + lib - def find_library_file(self, dirs, lib, debug=0): + def find_library_file(self, dirs, lib, debug=False): shared_f = self.library_filename(lib, lib_type='shared') dylib_f = self.library_filename(lib, lib_type='dylib') static_f = self.library_filename(lib, lib_type='static') diff --git a/distutils2/config.py b/distutils2/config.py --- a/distutils2/config.py +++ b/distutils2/config.py @@ -1,21 +1,19 @@ -""" distutil2.config +"""Utilities to find and read config files used by distutils2.""" - Know how to read all config files Distutils2 uses. 
-""" -import os.path +import codecs import os import sys import logging + +from shlex import split from ConfigParser import RawConfigParser -from shlex import split - from distutils2 import logger -from distutils2.errors import DistutilsOptionError +from distutils2.errors import PackagingOptionError from distutils2.compiler.extension import Extension -from distutils2.util import check_environ, resolve_name, strtobool +from distutils2.util import (check_environ, iglob, resolve_name, strtobool, + split_multiline) from distutils2.compiler import set_compiler from distutils2.command import set_command -from distutils2.resources import resources_dests from distutils2.markers import interpret @@ -25,29 +23,54 @@ if not vals_str: return fields = [] - for field in vals_str.split(os.linesep): + # the line separator is \n for setup.cfg files + for field in vals_str.split('\n'): tmp_vals = field.split('--') - if (len(tmp_vals) == 2) and (not interpret(tmp_vals[1])): + if len(tmp_vals) == 2 and not interpret(tmp_vals[1]): continue fields.append(tmp_vals[0]) - # Get bash options like `gcc -print-file-name=libgcc.a` + # Get bash options like `gcc -print-file-name=libgcc.a` XXX bash options? 
vals = split(' '.join(fields)) if vals: return vals +def _rel_path(base, path): + # normalizes and returns a lstripped-/-separated path + base = base.replace(os.path.sep, '/') + path = path.replace(os.path.sep, '/') + assert path.startswith(base) + return path[len(base):].lstrip('/') + + +def get_resources_dests(resources_root, rules): + """Find destinations for resources files""" + destinations = {} + for base, suffix, dest in rules: + prefix = os.path.join(resources_root, base) + for abs_base in iglob(prefix): + abs_glob = os.path.join(abs_base, suffix) + for abs_path in iglob(abs_glob): + resource_file = _rel_path(resources_root, abs_path) + if dest is None: # remove the entry if it was here + destinations.pop(resource_file, None) + else: + rel_path = _rel_path(abs_base, abs_path) + rel_dest = dest.replace(os.path.sep, '/').rstrip('/') + destinations[resource_file] = rel_dest + '/' + rel_path + return destinations + + class Config(object): - """Reads configuration files and work with the Distribution instance - """ + """Class used to work with configuration files""" def __init__(self, dist): self.dist = dist - self.setup_hook = None + self.setup_hooks = [] - def run_hook(self, config): - if self.setup_hook is None: - return - # the hook gets only the config - self.setup_hook(config) + def run_hooks(self, config): + """Run setup hooks in the order defined in the spec.""" + for hook in self.setup_hooks: + hook(config) def find_config_files(self): """Find as many configuration files as should be processed for this @@ -55,9 +78,9 @@ should be parsed. The filenames returned are guaranteed to exist (modulo nasty race conditions). - There are three possible config files: distutils.cfg in the - Distutils installation directory (ie. where the top-level - Distutils __inst__.py file lives), a file in the user's home + There are three possible config files: distutils2.cfg in the + Packaging installation directory (ie. 
where the top-level + Packaging __inst__.py file lives), a file in the user's home directory named .pydistutils.cfg on Unix and pydistutils.cfg on Windows/Mac; and setup.cfg in the current directory. @@ -67,11 +90,11 @@ files = [] check_environ() - # Where to look for the system-wide Distutils config file + # Where to look for the system-wide Packaging config file sys_dir = os.path.dirname(sys.modules['distutils2'].__file__) # Look for the system config file - sys_file = os.path.join(sys_dir, "distutils.cfg") + sys_file = os.path.join(sys_dir, "distutils2.cfg") if os.path.isfile(sys_file): files.append(sys_file) @@ -101,33 +124,41 @@ # XXX return value - def _multiline(self, value): - value = [v for v in - [v.strip() for v in value.split('\n')] - if v != ''] - return value - def _read_setup_cfg(self, parser, cfg_filename): cfg_directory = os.path.dirname(os.path.abspath(cfg_filename)) content = {} for section in parser.sections(): content[section] = dict(parser.items(section)) - # global:setup_hook is called *first* + # global setup hooks are called first if 'global' in content: - if 'setup_hook' in content['global']: - setup_hook = content['global']['setup_hook'] - self.setup_hook = resolve_name(setup_hook) - self.run_hook(content) + if 'setup_hooks' in content['global']: + setup_hooks = split_multiline(content['global']['setup_hooks']) + + # add project directory to sys.path, to allow hooks to be + # distributed with the project + sys.path.insert(0, cfg_directory) + try: + for line in setup_hooks: + try: + hook = resolve_name(line) + except ImportError: + logger.warning('cannot find setup hook: %s', + sys.exc_info()[1].args[0]) + else: + self.setup_hooks.append(hook) + self.run_hooks(content) + finally: + sys.path.pop(0) metadata = self.dist.metadata # setting the metadata values if 'metadata' in content: - for key, value in content['metadata'].iteritems(): + for key, value in content['metadata'].items(): key = key.replace('_', '-') if 
metadata.is_multi_field(key): - value = self._multiline(value) + value = split_multiline(value) if key == 'project-url': value = [(label.strip(), url.strip()) @@ -138,71 +169,74 @@ if 'description' in content['metadata']: msg = ("description and description-file' are " "mutually exclusive") - raise DistutilsOptionError(msg) + raise PackagingOptionError(msg) - if isinstance(value, list): - filenames = value - else: - filenames = value.split() + filenames = value.split() - # concatenate each files - value = '' + # concatenate all files + value = [] for filename in filenames: # will raise if file not found - description_file = open(filename) - try: - value += description_file.read().strip() + '\n' - finally: - description_file.close() + with open(filename) as description_file: + value.append(description_file.read().strip()) # add filename as a required file if filename not in metadata.requires_files: metadata.requires_files.append(filename) - value = value.strip() + value = '\n'.join(value).strip() key = 'description' if metadata.is_metadata_field(key): metadata[key] = self._convert_metadata(key, value) + if 'files' in content: + files = content['files'] + self.dist.package_dir = files.pop('packages_root', None) - if 'files' in content: - def _convert(key, value): - if key not in ('packages_root',): - value = self._multiline(value) - return value + files = dict((key, split_multiline(value)) for key, value in + files.items()) - files = dict([(key, _convert(key, value)) - for key, value in content['files'].iteritems()]) self.dist.packages = [] - self.dist.package_dir = files.get('packages_root') packages = files.get('packages', []) - if isinstance(packages, str): + if isinstance(packages, basestring): packages = [packages] for package in packages: + if ':' in package: + dir_, package = package.split(':') + self.dist.package_dir[package] = dir_ self.dist.packages.append(package) self.dist.py_modules = files.get('modules', []) - if isinstance(self.dist.py_modules, str): + 
if isinstance(self.dist.py_modules, basestring): self.dist.py_modules = [self.dist.py_modules] self.dist.scripts = files.get('scripts', []) - if isinstance(self.dist.scripts, str): + if isinstance(self.dist.scripts, basestring): self.dist.scripts = [self.dist.scripts] self.dist.package_data = {} for data in files.get('package_data', []): data = data.split('=') if len(data) != 2: - continue # XXX error should never pass silently + continue # XXX error should never pass silently key, value = data self.dist.package_data[key.strip()] = value.strip() + self.dist.data_files = [] + for data in files.get('data_files', []): + data = data.split('=') + if len(data) != 2: + continue + key, value = data + values = [v.strip() for v in value.split(',')] + self.dist.data_files.append((key, values)) + # manifest template self.dist.extra_files = files.get('extra_files', []) resources = [] for rule in files.get('resources', []): - glob , destination = rule.split('=', 1) + glob, destination = rule.split('=', 1) rich_glob = glob.strip().split(' ', 1) if len(rich_glob) == 2: prefix, suffix = rich_glob @@ -212,13 +246,15 @@ suffix = glob if destination == '': destination = None - resources.append((prefix.strip(), suffix.strip(), destination.strip())) - self.dist.data_files = resources_dests(cfg_directory, resources) + resources.append( + (prefix.strip(), suffix.strip(), destination.strip())) + self.dist.data_files = get_resources_dests( + cfg_directory, resources) ext_modules = self.dist.ext_modules for section_key in content: labels = section_key.split('=') - if (len(labels) == 2) and (labels[0] == 'extension'): + if len(labels) == 2 and labels[0] == 'extension': # labels[1] not used from now but should be implemented # for extension build dependency values_dct = content[section_key] @@ -239,8 +275,7 @@ _pop_values(values_dct, 'depends'), values_dct.pop('language', None), values_dct.pop('optional', None), - **values_dct - )) + **values_dct)) def parse_config_files(self, filenames=None): 
if filenames is None: @@ -252,7 +287,8 @@ for filename in filenames: logger.debug(" reading %s", filename) - parser.read(filename) + with codecs.open(filename, 'r', encoding='utf-8') as f: + parser.readfp(f) if os.path.split(filename)[-1] == 'setup.cfg': self._read_setup_cfg(parser, filename) @@ -275,8 +311,8 @@ opt = opt.replace('-', '_') if opt == 'sub_commands': - val = self._multiline(val) - if isinstance(val, str): + val = split_multiline(val) + if isinstance(val, basestring): val = [val] # Hooks use a suffix system to prevent being overriden @@ -287,8 +323,8 @@ if (opt.startswith("pre_hook.") or opt.startswith("post_hook.")): hook_type, alias = opt.split(".") - hook_dict = opt_dict.setdefault(hook_type, - (filename, {}))[1] + hook_dict = opt_dict.setdefault( + hook_type, (filename, {}))[1] hook_dict[alias] = val else: opt_dict[opt] = filename, val @@ -300,28 +336,28 @@ # If there was a "global" section in the config file, use it # to set Distribution options. if 'global' in self.dist.command_options: - for (opt, (src, val)) in self.dist.command_options['global'].iteritems(): + for opt, (src, val) in self.dist.command_options['global'].items(): alias = self.dist.negative_opt.get(opt) try: if alias: setattr(self.dist, alias, not strtobool(val)) - elif opt in ('verbose', 'dry_run'): # ugh! + elif opt == 'dry_run': # FIXME ugh! 
setattr(self.dist, opt, strtobool(val)) else: setattr(self.dist, opt, val) - except ValueError, msg: - raise DistutilsOptionError(msg) + except ValueError: + raise PackagingOptionError(sys.exc_info()[1]) def _load_compilers(self, compilers): - compilers = self._multiline(compilers) - if isinstance(compilers, str): + compilers = split_multiline(compilers) + if isinstance(compilers, basestring): compilers = [compilers] for compiler in compilers: set_compiler(compiler.strip()) def _load_commands(self, commands): - commands = self._multiline(commands) - if isinstance(commands, str): + commands = split_multiline(commands) + if isinstance(commands, basestring): commands = [commands] for command in commands: set_command(command.strip()) diff --git a/distutils2/create.py b/distutils2/create.py new file mode 100644 --- /dev/null +++ b/distutils2/create.py @@ -0,0 +1,689 @@ +"""Interactive helper used to create a setup.cfg file. + +This script will generate a distutils2 configuration file by looking at +the current directory and asking the user questions. It is intended to +be called as *pysetup create*. +""" + +# Original code by Sean Reifschneider + +# Original TODO list: +# Look for a license file and automatically add the category. +# When a .c file is found during the walk, can we add it as an extension? +# Ask if there is a maintainer different that the author +# Ask for the platform (can we detect this via "import win32" or something?) +# Ask for the dependencies. +# Ask for the Requires-Dist +# Ask for the Provides-Dist +# Ask for a description +# Detect scripts (not sure how. #! outside of package?) 
+ +import codecs +import os +import re +import imp +import sys +import glob +import shutil +import sysconfig +import tokenize +from hashlib import md5 +from textwrap import dedent +from distutils2.util import cmp_to_key, detect_encoding +from ConfigParser import RawConfigParser +# importing this with an underscore as it should be replaced by the +# dict form or another structures for all purposes +from distutils2._trove import all_classifiers as _CLASSIFIERS_LIST +from distutils2.version import is_valid_version + +_FILENAME = 'setup.cfg' +_DEFAULT_CFG = '.pypkgcreate' + +_helptext = { + 'name': ''' +The name of the program to be packaged, usually a single word composed +of lower-case characters such as "python", "sqlalchemy", or "CherryPy". +''', + 'version': ''' +Version number of the software, typically 2 or 3 numbers separated by dots +such as "1.00", "0.6", or "3.02.01". "0.1.0" is recommended for initial +development. +''', + 'summary': ''' +A one-line summary of what this project is or does, typically a sentence 80 +characters or less in length. +''', + 'author': ''' +The full name of the author (typically you). +''', + 'author_email': ''' +E-mail address of the project author (typically you). +''', + 'do_classifier': ''' +Trove classifiers are optional identifiers that allow you to specify the +intended audience by saying things like "Beta software with a text UI +for Linux under the PSF license". However, this can be a somewhat involved +process. +''', + 'packages': ''' +You can provide a package name contained in your project. +''', + 'modules': ''' +You can provide a python module contained in your project. +''', + 'extra_files': ''' +You can provide extra files/dirs contained in your project. +It has to follow the template syntax. XXX add help here. +''', + + 'home_page': ''' +The home page for the project, typically starting with "http://". +''', + 'trove_license': ''' +Optionally you can specify a license. 
Type a string that identifies a common +license, and then you can select a list of license specifiers. +''', + 'trove_generic': ''' +Optionally, you can set other trove identifiers for things such as the +human language, programming language, user interface, etc... +''', + 'setup.py found': ''' +The setup.py script will be executed to retrieve the metadata. +An interactive helper will be run if you answer "n", +''', +} + +PROJECT_MATURITY = ['Development Status :: 1 - Planning', + 'Development Status :: 2 - Pre-Alpha', + 'Development Status :: 3 - Alpha', + 'Development Status :: 4 - Beta', + 'Development Status :: 5 - Production/Stable', + 'Development Status :: 6 - Mature', + 'Development Status :: 7 - Inactive'] + +# XXX everything needs docstrings and tests (both low-level tests of various +# methods and functional tests of running the script) + + +def load_setup(): + """run the setup script (i.e the setup.py file) + + This function load the setup file in all cases (even if it have already + been loaded before, because we are monkey patching its setup function with + a particular one""" + with open("setup.py", "rb") as f: + encoding, lines = detect_encoding(f.readline) + with open("setup.py") as f: + imp.load_module("setup", f, "setup.py", (".py", "r", imp.PY_SOURCE)) + + +def ask_yn(question, default=None, helptext=None): + question += ' (y/n)' + while True: + answer = ask(question, default, helptext, required=True) + if answer and answer[0].lower() in 'yn': + return answer[0].lower() + + print('\nERROR: You must select "Y" or "N".\n') + + +def ask(question, default=None, helptext=None, required=True, + lengthy=False, multiline=False): + prompt = u'%s: ' % (question,) + if default: + prompt = u'%s [%s]: ' % (question, default) + if default and len(question) + len(default) > 70: + prompt = u'%s\n [%s]: ' % (question, default) + if lengthy or multiline: + prompt += '\n > ' + + if not helptext: + helptext = 'No additional help available.' 
+ + helptext = helptext.strip("\n") + + while True: + sys.stdout.write(prompt) + sys.stdout.flush() + + line = sys.stdin.readline().strip() + if line == '?': + print('=' * 70) + print(helptext) + print('=' * 70) + continue + if default and not line: + return default + if not line and required: + print('*' * 70) + print('This value cannot be empty.') + print('===========================') + if helptext: + print(helptext) + print('*' * 70) + continue + return line + + +def convert_yn_to_bool(yn, yes=True, no=False): + """Convert a y/yes or n/no to a boolean value.""" + if yn.lower().startswith('y'): + return yes + else: + return no + + +def _build_classifiers_dict(classifiers): + d = {} + for key in classifiers: + subdict = d + for subkey in key.split(' :: '): + if subkey not in subdict: + subdict[subkey] = {} + subdict = subdict[subkey] + return d + +CLASSIFIERS = _build_classifiers_dict(_CLASSIFIERS_LIST) + + +def _build_licences(classifiers): + res = [] + for index, item in enumerate(classifiers): + if not item.startswith('License :: '): + continue + res.append((index, item.split(' :: ')[-1].lower())) + return res + +LICENCES = _build_licences(_CLASSIFIERS_LIST) + + +class MainProgram(object): + """Make a project setup configuration file (setup.cfg).""" + + def __init__(self): + self.configparser = None + self.classifiers = set() + self.data = {'name': '', + 'version': '1.0.0', + 'classifier': self.classifiers, + 'packages': [], + 'modules': [], + 'platform': [], + 'resources': [], + 'extra_files': [], + 'scripts': [], + } + self._load_defaults() + + def __call__(self): + setupcfg_defined = False + if self.has_setup_py() and self._prompt_user_for_conversion(): + setupcfg_defined = self.convert_py_to_cfg() + if not setupcfg_defined: + self.define_cfg_values() + self._write_cfg() + + def has_setup_py(self): + """Test for the existence of a setup.py file.""" + return os.path.exists('setup.py') + + def define_cfg_values(self): + self.inspect() + self.query_user() + + 
def _lookup_option(self, key): + if not self.configparser.has_option('DEFAULT', key): + return None + return self.configparser.get('DEFAULT', key) + + def _load_defaults(self): + # Load default values from a user configuration file + self.configparser = RawConfigParser() + # TODO replace with section in distutils config file + default_cfg = os.path.expanduser(os.path.join('~', _DEFAULT_CFG)) + self.configparser.read(default_cfg) + self.data['author'] = self._lookup_option('author') + self.data['author_email'] = self._lookup_option('author_email') + + def _prompt_user_for_conversion(self): + # Prompt the user about whether they would like to use the setup.py + # conversion utility to generate a setup.cfg or generate the setup.cfg + # from scratch + answer = ask_yn(('A legacy setup.py has been found.\n' + 'Would you like to convert it to a setup.cfg?'), + default="y", + helptext=_helptext['setup.py found']) + return convert_yn_to_bool(answer) + + def _dotted_packages(self, data): + packages = sorted(data) + modified_pkgs = [] + for pkg in packages: + pkg = pkg.lstrip('./') + pkg = pkg.replace('/', '.') + modified_pkgs.append(pkg) + return modified_pkgs + + def _write_cfg(self): + if os.path.exists(_FILENAME): + if os.path.exists('%s.old' % _FILENAME): + print("ERROR: %(name)s.old backup exists, please check that " + "current %(name)s is correct and remove %(name)s.old" % + {'name': _FILENAME}) + return + shutil.move(_FILENAME, '%s.old' % _FILENAME) + + with codecs.open(_FILENAME, 'w', encoding='utf-8') as fp: + fp.write('[metadata]\n') + # TODO use metadata module instead of hard-coding field-specific + # behavior here + + # simple string entries + for name in ('name', 'version', 'summary', 'download_url'): + fp.write('%s = %s\n' % (name, self.data.get(name, 'UNKNOWN'))) + + # optional string entries + if 'keywords' in self.data and self.data['keywords']: + fp.write('keywords = %s\n' % ' '.join(self.data['keywords'])) + for name in ('home_page', 'author', 
'author_email', + 'maintainer', 'maintainer_email', 'description-file'): + if name in self.data and self.data[name]: + fp.write('%s = %s\n' % (name, self.data[name])) + if 'description' in self.data: + fp.write( + 'description = %s\n' + % '\n |'.join(self.data['description'].split('\n'))) + + # multiple use string entries + for name in ('platform', 'supported-platform', 'classifier', + 'requires-dist', 'provides-dist', 'obsoletes-dist', + 'requires-external'): + if not(name in self.data and self.data[name]): + continue + fp.write('%s = ' % name) + fp.write(''.join(' %s\n' % val + for val in self.data[name]).lstrip()) + fp.write('\n[files]\n') + for name in ('packages', 'modules', 'scripts', + 'package_data', 'extra_files'): + if not(name in self.data and self.data[name]): + continue + fp.write('%s = %s\n' + % (name, '\n '.join(self.data[name]).strip())) + fp.write('\nresources =\n') + for src, dest in self.data['resources']: + fp.write(' %s = %s\n' % (src, dest)) + fp.write('\n') + + os.chmod(_FILENAME, 0o644) + print('Wrote "%s".' % _FILENAME) + + def convert_py_to_cfg(self): + """Generate a setup.cfg from an existing setup.py. + + It only exports the distutils metadata (setuptools specific metadata + is not currently supported). + """ + data = self.data + + def setup_mock(**attrs): + """Mock the setup(**attrs) in order to retrieve metadata.""" + + # TODO use config and metadata instead of Distribution + from distutils.dist import Distribution + dist = Distribution(attrs) + dist.parse_config_files() + + # 1. 
retrieve metadata fields that are quite similar in + # PEP 314 and PEP 345 + labels = (('name',) * 2, + ('version',) * 2, + ('author',) * 2, + ('author_email',) * 2, + ('maintainer',) * 2, + ('maintainer_email',) * 2, + ('description', 'summary'), + ('long_description', 'description'), + ('url', 'home_page'), + ('platforms', 'platform'), + # backport only for 2.5+ + ('provides', 'provides-dist'), + ('obsoletes', 'obsoletes-dist'), + ('requires', 'requires-dist')) + + get = lambda lab: getattr(dist.metadata, lab.replace('-', '_')) + data.update((new, get(old)) for old, new in labels if get(old)) + + # 2. retrieve data that requires special processing + data['classifier'].update(dist.get_classifiers() or []) + data['scripts'].extend(dist.scripts or []) + data['packages'].extend(dist.packages or []) + data['modules'].extend(dist.py_modules or []) + # 2.1 data_files -> resources + if dist.data_files: + if (len(dist.data_files) < 2 or + isinstance(dist.data_files[1], basestring)): + dist.data_files = [('', dist.data_files)] + # add tokens in the destination paths + vars = {'distribution.name': data['name']} + path_tokens = list(sysconfig.get_paths(vars=vars).items()) + + # TODO replace this with a key function + def length_comparison(x, y): + len_x = len(x[1]) + len_y = len(y[1]) + if len_x == len_y: + return 0 + elif len_x < len_y: + return -1 + else: + return 1 + + # sort tokens to use the longest one first + path_tokens.sort(key=cmp_to_key(length_comparison)) + for dest, srcs in (dist.data_files or []): + dest = os.path.join(sys.prefix, dest) + dest = dest.replace(os.path.sep, '/') + for tok, path in path_tokens: + path = path.replace(os.path.sep, '/') + if not dest.startswith(path): + continue + + dest = ('{%s}' % tok) + dest[len(path):] + files = [('/ '.join(src.rsplit('/', 1)), dest) + for src in srcs] + data['resources'].extend(files) + + # 2.2 package_data -> extra_files + package_dirs = dist.package_dir or {} + for package, extras in dist.package_data.items() 
or []: + package_dir = package_dirs.get(package, package) + for file_ in extras: + if package_dir: + file_ = package_dir + '/' + file_ + data['extra_files'].append(file_) + + # Use README file if its content is the desciption + if "description" in data: + ref = md5(re.sub('\s', '', + self.data['description']).lower().encode()) + ref = ref.digest() + for readme in glob.glob('README*'): + with codecs.open(readme, encoding='utf-8') as fp: + contents = fp.read() + contents = re.sub('\s', '', contents.lower()).encode() + val = md5(contents).digest() + if val == ref: + del data['description'] + data['description-file'] = readme + break + + # apply monkey patch to distutils (v1) and setuptools (if needed) + # (abort the feature if distutils v1 has been killed) + try: + from distutils import core + core.setup # make sure it's not d2 maskerading as d1 + except (ImportError, AttributeError): + return + saved_setups = [(core, core.setup)] + core.setup = setup_mock + try: + import setuptools + except ImportError: + pass + else: + saved_setups.append((setuptools, setuptools.setup)) + setuptools.setup = setup_mock + # get metadata by executing the setup.py with the patched setup(...) + success = False # for python < 2.4 + try: + load_setup() + success = True + finally: # revert monkey patches + for patched_module, original_setup in saved_setups: + patched_module.setup = original_setup + if not self.data: + raise ValueError('Unable to load metadata from setup.py') + return success + + def inspect(self): + """Inspect the current working diretory for a name and version. + + This information is harvested in where the directory is named + like [name]-[version]. + """ + dir_name = os.path.basename(os.getcwd()) + self.data['name'] = dir_name + match = re.match(r'(.*)-(\d.+)', dir_name) + if match: + self.data['name'] = match.group(1) + self.data['version'] = match.group(2) + # TODO needs testing! 
+ if not is_valid_version(self.data['version']): + msg = "Invalid version discovered: %s" % self.data['version'] + raise ValueError(msg) + + def query_user(self): + self.data['name'] = ask('Project name', self.data['name'], + _helptext['name']) + + self.data['version'] = ask('Current version number', + self.data.get('version'), _helptext['version']) + self.data['summary'] = ask('Package summary', + self.data.get('summary'), _helptext['summary'], + lengthy=True) + self.data['author'] = ask('Author name', + self.data.get('author'), _helptext['author']) + self.data['author_email'] = ask('Author e-mail address', + self.data.get('author_email'), _helptext['author_email']) + self.data['home_page'] = ask('Project home page', + self.data.get('home_page'), _helptext['home_page'], + required=False) + + if ask_yn('Do you want me to automatically build the file list ' + 'with everything I can find in the current directory? ' + 'If you say no, you will have to define them manually.') == 'y': + self._find_files() + else: + while ask_yn('Do you want to add a single module?' 
+ ' (you will be able to add full packages next)', + helptext=_helptext['modules']) == 'y': + self._set_multi('Module name', 'modules') + + while ask_yn('Do you want to add a package?', + helptext=_helptext['packages']) == 'y': + self._set_multi('Package name', 'packages') + + while ask_yn('Do you want to add an extra file?', + helptext=_helptext['extra_files']) == 'y': + self._set_multi('Extra file/dir name', 'extra_files') + + if ask_yn('Do you want to set Trove classifiers?', + helptext=_helptext['do_classifier']) == 'y': + self.set_classifier() + + def _find_files(self): + # we are looking for python modules and packages, + # other stuff are added as regular files + pkgs = self.data['packages'] + modules = self.data['modules'] + extra_files = self.data['extra_files'] + + def is_package(path): + return os.path.exists(os.path.join(path, '__init__.py')) + + curdir = os.getcwd() + scanned = [] + _pref = ['lib', 'include', 'dist', 'build', '.', '~'] + _suf = ['.pyc'] + + def to_skip(path): + path = relative(path) + + for pref in _pref: + if path.startswith(pref): + return True + + for suf in _suf: + if path.endswith(suf): + return True + + return False + + def relative(path): + return path[len(curdir) + 1:] + + def dotted(path): + res = relative(path).replace(os.path.sep, '.') + if res.endswith('.py'): + res = res[:-len('.py')] + return res + + # first pass: packages + for root, dirs, files in os.walk(curdir): + if to_skip(root): + continue + for dir_ in sorted(dirs): + if to_skip(dir_): + continue + fullpath = os.path.join(root, dir_) + dotted_name = dotted(fullpath) + if is_package(fullpath) and dotted_name not in pkgs: + pkgs.append(dotted_name) + scanned.append(fullpath) + + # modules and extra files + for root, dirs, files in os.walk(curdir): + if to_skip(root): + continue + + if any(root.startswith(path) for path in scanned): + continue + + for file in sorted(files): + fullpath = os.path.join(root, file) + if to_skip(fullpath): + continue + # single module? 
+ if os.path.splitext(file)[-1] == '.py': + modules.append(dotted(fullpath)) + else: + extra_files.append(relative(fullpath)) + + def _set_multi(self, question, name): + existing_values = self.data[name] + value = ask(question, helptext=_helptext[name]).strip() + if value not in existing_values: + existing_values.append(value) + + def set_classifier(self): + self.set_maturity_status(self.classifiers) + self.set_license(self.classifiers) + self.set_other_classifier(self.classifiers) + + def set_other_classifier(self, classifiers): + if ask_yn('Do you want to set other trove identifiers?', 'n', + _helptext['trove_generic']) != 'y': + return + self.walk_classifiers(classifiers, [CLASSIFIERS], '') + + def walk_classifiers(self, classifiers, trovepath, desc): + trove = trovepath[-1] + + if not trove: + return + + for key in sorted(trove): + if len(trove[key]) == 0: + if ask_yn('Add "%s"' % desc[4:] + ' :: ' + key, 'n') == 'y': + classifiers.add(desc[4:] + ' :: ' + key) + continue + + if ask_yn('Do you want to set items under\n "%s" (%d sub-items)?' + % (key, len(trove[key])), 'n', + _helptext['trove_generic']) == 'y': + self.walk_classifiers(classifiers, trovepath + [trove[key]], + desc + ' :: ' + key) + + def set_license(self, classifiers): + while True: + license = ask('What license do you use?', + helptext=_helptext['trove_license'], required=False) + if not license: + return + + license_words = license.lower().split(' ') + found_list = [] + + for index, licence in LICENCES: + for word in license_words: + if word in licence: + found_list.append(index) + break + + if len(found_list) == 0: + print('ERROR: Could not find a matching license for "%s"' % + license) + continue + + question = 'Matching licenses:\n\n' + + for index, list_index in enumerate(found_list): + question += ' %s) %s\n' % (index + 1, + _CLASSIFIERS_LIST[list_index]) + + question += ('\nType the number of the license you wish to use or ' + '? 
to try again:') + choice = ask(question, required=False) + + if choice == '?': + continue + if choice == '': + return + + try: + index = found_list[int(choice) - 1] + except ValueError: + print("ERROR: Invalid selection, type a number from the list " + "above.") + + classifiers.add(_CLASSIFIERS_LIST[index]) + + def set_maturity_status(self, classifiers): + maturity_name = lambda mat: mat.split('- ')[-1] + maturity_question = '''\ + Please select the project status: + + %s + + Status''' % '\n'.join('%s - %s' % (i, maturity_name(n)) + for i, n in enumerate(PROJECT_MATURITY)) + while True: + choice = ask(dedent(maturity_question), required=False) + + if choice: + try: + choice = int(choice) - 1 + key = PROJECT_MATURITY[choice] + classifiers.add(key) + return + except (IndexError, ValueError): + print("ERROR: Invalid selection, type a single digit " + "number.") + + +def main(): + """Main entry point.""" + program = MainProgram() + # # uncomment when implemented + # if not program.load_existing_setup_script(): + # program.inspect_directory() + # program.query_user() + # program.update_config_file() + # program.write_setup_script() + # distutils2.util.cfg_to_args() + program() + + +if __name__ == '__main__': + main() diff --git a/distutils2/database.py b/distutils2/database.py new file mode 100644 --- /dev/null +++ b/distutils2/database.py @@ -0,0 +1,647 @@ +"""PEP 376 implementation.""" + +from StringIO import StringIO +import os +import re +import csv +import sys +import zipimport +from hashlib import md5 +from distutils2 import logger +from distutils2.errors import PackagingError +from distutils2.version import suggest_normalized_version, VersionPredicate +from distutils2.metadata import Metadata + + +__all__ = [ + 'Distribution', 'EggInfoDistribution', 'distinfo_dirname', + 'get_distributions', 'get_distribution', 'get_file_users', + 'provides_distribution', 'obsoletes_distribution', + 'enable_cache', 'disable_cache', 'clear_cache', + 'get_file_path', 'get_file'] + 
+ +# TODO update docs + +DIST_FILES = ('INSTALLER', 'METADATA', 'RECORD', 'REQUESTED', 'RESOURCES') + +# Cache +_cache_name = {} # maps names to Distribution instances +_cache_name_egg = {} # maps names to EggInfoDistribution instances +_cache_path = {} # maps paths to Distribution instances +_cache_path_egg = {} # maps paths to EggInfoDistribution instances +_cache_generated = False # indicates if .dist-info distributions are cached +_cache_generated_egg = False # indicates if .dist-info and .egg are cached +_cache_enabled = True + + +def enable_cache(): + """ + Enables the internal cache. + + Note that this function will not clear the cache in any case, for that + functionality see :func:`clear_cache`. + """ + global _cache_enabled + + _cache_enabled = True + + +def disable_cache(): + """ + Disables the internal cache. + + Note that this function will not clear the cache in any case, for that + functionality see :func:`clear_cache`. + """ + global _cache_enabled + + _cache_enabled = False + + +def clear_cache(): + """ Clears the internal cache. 
""" + global _cache_generated, _cache_generated_egg + + _cache_name.clear() + _cache_name_egg.clear() + _cache_path.clear() + _cache_path_egg.clear() + _cache_generated = False + _cache_generated_egg = False + + +def _yield_distributions(include_dist, include_egg, paths): + """ + Yield .dist-info and .egg(-info) distributions, based on the arguments + + :parameter include_dist: yield .dist-info distributions + :parameter include_egg: yield .egg(-info) distributions + """ + for path in paths: + realpath = os.path.realpath(path) + if not os.path.isdir(realpath): + continue + for dir in os.listdir(realpath): + dist_path = os.path.join(realpath, dir) + if include_dist and dir.endswith('.dist-info'): + yield Distribution(dist_path) + elif include_egg and (dir.endswith('.egg-info') or + dir.endswith('.egg')): + yield EggInfoDistribution(dist_path) + + +def _generate_cache(use_egg_info, paths): + global _cache_generated, _cache_generated_egg + + if _cache_generated_egg or (_cache_generated and not use_egg_info): + return + else: + gen_dist = not _cache_generated + gen_egg = use_egg_info + + for dist in _yield_distributions(gen_dist, gen_egg, paths): + if isinstance(dist, Distribution): + _cache_path[dist.path] = dist + if dist.name not in _cache_name: + _cache_name[dist.name] = [] + _cache_name[dist.name].append(dist) + else: + _cache_path_egg[dist.path] = dist + if dist.name not in _cache_name_egg: + _cache_name_egg[dist.name] = [] + _cache_name_egg[dist.name].append(dist) + + if gen_dist: + _cache_generated = True + if gen_egg: + _cache_generated_egg = True + + +class Distribution(object): + """Created with the *path* of the ``.dist-info`` directory provided to the + constructor. 
It reads the metadata contained in ``METADATA`` when it is + instantiated.""" + + name = '' + """The name of the distribution.""" + + version = '' + """The version of the distribution.""" + + metadata = None + """A :class:`distutils2.metadata.Metadata` instance loaded with + the distribution's ``METADATA`` file.""" + + requested = False + """A boolean that indicates whether the ``REQUESTED`` metadata file is + present (in other words, whether the package was installed by user + request or it was installed as a dependency).""" + + def __init__(self, path): + if _cache_enabled and path in _cache_path: + self.metadata = _cache_path[path].metadata + else: + metadata_path = os.path.join(path, 'METADATA') + self.metadata = Metadata(path=metadata_path) + + self.name = self.metadata['Name'] + self.version = self.metadata['Version'] + self.path = path + + if _cache_enabled and path not in _cache_path: + _cache_path[path] = self + + def __repr__(self): + return '' % ( + self.name, self.version, self.path) + + def _get_records(self, local=False): + results = [] + with self.get_distinfo_file('RECORD') as record: + record_reader = csv.reader(record, delimiter=',', + lineterminator='\n') + for row in record_reader: + missing = [None for i in range(len(row), 3)] + path, checksum, size = row + missing + if local: + path = path.replace('/', os.sep) + path = os.path.join(sys.prefix, path) + results.append((path, checksum, size)) + return results + + def get_resource_path(self, relative_path): + with self.get_distinfo_file('RESOURCES') as resources_file: + resources_reader = csv.reader(resources_file, delimiter=',', + lineterminator='\n') + for relative, destination in resources_reader: + if relative == relative_path: + return destination + raise KeyError( + 'no resource file with relative path %r is installed' % + relative_path) + + def list_installed_files(self, local=False): + """ + Iterates over the ``RECORD`` entries and returns a tuple + ``(path, md5, size)`` for each line. 
If *local* is ``True``, + the returned path is transformed into a local absolute path. + Otherwise the raw value from RECORD is returned. + + A local absolute path is an absolute path in which occurrences of + ``'/'`` have been replaced by the system separator given by ``os.sep``. + + :parameter local: flag to say if the path should be returned a local + absolute path + + :type local: boolean + :returns: iterator of (path, md5, size) + """ + for result in self._get_records(local): + yield result + + def uses(self, path): + """ + Returns ``True`` if path is listed in ``RECORD``. *path* can be a local + absolute path or a relative ``'/'``-separated path. + + :rtype: boolean + """ + for p, checksum, size in self._get_records(): + local_absolute = os.path.join(sys.prefix, p) + if path == p or path == local_absolute: + return True + return False + + def get_distinfo_file(self, path, binary=False): + """ + Returns a file located under the ``.dist-info`` directory. Returns a + ``file`` instance for the file pointed by *path*. + + :parameter path: a ``'/'``-separated path relative to the + ``.dist-info`` directory or an absolute path; + If *path* is an absolute path and doesn't start + with the ``.dist-info`` directory path, + a :class:`PackagingError` is raised + :type path: string + :parameter binary: If *binary* is ``True``, opens the file in read-only + binary mode (``rb``), otherwise opens it in + read-only mode (``r``). + :rtype: file object + """ + open_flags = 'r' + if binary: + open_flags += 'b' + + # Check if it is an absolute path # XXX use relpath, add tests + if path.find(os.sep) >= 0: + # it's an absolute path? 
+ distinfo_dirname, path = path.split(os.sep)[-2:] + if distinfo_dirname != self.path.split(os.sep)[-1]: + raise PackagingError( + 'dist-info file %r does not belong to the %r %s ' + 'distribution' % (path, self.name, self.version)) + + # The file must be relative + if path not in DIST_FILES: + raise PackagingError('invalid path for a dist-info file: %r' % + path) + + path = os.path.join(self.path, path) + return open(path, open_flags) + + def list_distinfo_files(self, local=False): + """ + Iterates over the ``RECORD`` entries and returns paths for each line if + the path is pointing to a file located in the ``.dist-info`` directory + or one of its subdirectories. + + :parameter local: If *local* is ``True``, each returned path is + transformed into a local absolute path. Otherwise the + raw value from ``RECORD`` is returned. + :type local: boolean + :returns: iterator of paths + """ + for path, checksum, size in self._get_records(local): + yield path + + def __eq__(self, other): + return isinstance(other, Distribution) and self.path == other.path + + # See http://docs.python.org/reference/datamodel#object.__hash__ + __hash__ = object.__hash__ + + +class EggInfoDistribution(object): + """Created with the *path* of the ``.egg-info`` directory or file provided + to the constructor. 
It reads the metadata contained in the file itself, or + if the given path happens to be a directory, the metadata is read from the + file ``PKG-INFO`` under that directory.""" + + name = '' + """The name of the distribution.""" + + version = '' + """The version of the distribution.""" + + metadata = None + """A :class:`distutils2.metadata.Metadata` instance loaded with + the distribution's ``METADATA`` file.""" + + _REQUIREMENT = re.compile( + r'(?P[-A-Za-z0-9_.]+)\s*' + r'(?P(?:<|<=|!=|==|>=|>)[-A-Za-z0-9_.]+)?\s*' + r'(?P(?:\s*,\s*(?:<|<=|!=|==|>=|>)[-A-Za-z0-9_.]+)*)\s*' + r'(?P\[.*\])?') + + def __init__(self, path): + self.path = path + if _cache_enabled and path in _cache_path_egg: + self.metadata = _cache_path_egg[path].metadata + self.name = self.metadata['Name'] + self.version = self.metadata['Version'] + return + + # reused from Distribute's pkg_resources + def yield_lines(strs): + """Yield non-empty/non-comment lines of a ``basestring`` + or sequence""" + if isinstance(strs, basestring): + for s in strs.splitlines(): + s = s.strip() + # skip blank lines/comments + if s and not s.startswith('#'): + yield s + else: + for ss in strs: + for s in yield_lines(ss): + yield s + + requires = None + + if path.endswith('.egg'): + if os.path.isdir(path): + meta_path = os.path.join(path, 'EGG-INFO', 'PKG-INFO') + self.metadata = Metadata(path=meta_path) + try: + req_path = os.path.join(path, 'EGG-INFO', 'requires.txt') + with open(req_path, 'r') as fp: + requires = fp.read() + except IOError: + requires = None + else: + # FIXME handle the case where zipfile is not available + zipf = zipimport.zipimporter(path) + fileobj = StringIO( + zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8')) + self.metadata = Metadata(fileobj=fileobj) + try: + requires = zipf.get_data('EGG-INFO/requires.txt') + except IOError: + requires = None + self.name = self.metadata['Name'] + self.version = self.metadata['Version'] + + elif path.endswith('.egg-info'): + if os.path.isdir(path): + path 
= os.path.join(path, 'PKG-INFO') + try: + with open(os.path.join(path, 'requires.txt'), 'r') as fp: + requires = fp.read() + except IOError: + requires = None + self.metadata = Metadata(path=path) + self.name = self.metadata['Name'] + self.version = self.metadata['Version'] + + else: + raise ValueError('path must end with .egg-info or .egg, got %r' % + path) + + if requires is not None: + if self.metadata['Metadata-Version'] == '1.1': + # we can't have 1.1 metadata *and* Setuptools requires + for field in ('Obsoletes', 'Requires', 'Provides'): + del self.metadata[field] + + reqs = [] + + if requires is not None: + for line in yield_lines(requires): + if line.startswith('['): + logger.warning( + 'extensions in requires.txt are not supported ' + '(used by %r %s)', self.name, self.version) + break + else: + match = self._REQUIREMENT.match(line.strip()) + if not match: + # this happens when we encounter extras; since they + # are written at the end of the file we just exit + break + else: + if match.group('extras'): + msg = ('extra requirements are not supported ' + '(used by %r %s)', self.name, self.version) + logger.warning(msg, self.name) + name = match.group('name') + version = None + if match.group('first'): + version = match.group('first') + if match.group('rest'): + version += match.group('rest') + version = version.replace(' ', '') # trim spaces + if version is None: + reqs.append(name) + else: + reqs.append('%s (%s)' % (name, version)) + + if len(reqs) > 0: + self.metadata['Requires-Dist'] += reqs + + if _cache_enabled: + _cache_path_egg[self.path] = self + + def __repr__(self): + return '' % ( + self.name, self.version, self.path) + + def list_installed_files(self, local=False): + + def _md5(path): + with open(path, 'rb') as f: + content = f.read() + return md5(content).hexdigest() + + def _size(path): + return os.stat(path).st_size + + path = self.path + if local: + path = path.replace('/', os.sep) + + # XXX What about scripts and data files ? 
+ if os.path.isfile(path): + return [(path, _md5(path), _size(path))] + else: + files = [] + for root, dir, files_ in os.walk(path): + for item in files_: + item = os.path.join(root, item) + files.append((item, _md5(item), _size(item))) + return files + + return [] + + def uses(self, path): + return False + + def __eq__(self, other): + return (isinstance(other, EggInfoDistribution) and + self.path == other.path) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + __hash__ = object.__hash__ + + +def distinfo_dirname(name, version): + """ + The *name* and *version* parameters are converted into their + filename-escaped form, i.e. any ``'-'`` characters are replaced + with ``'_'`` other than the one in ``'dist-info'`` and the one + separating the name from the version number. + + :parameter name: is converted to a standard distribution name by replacing + any runs of non- alphanumeric characters with a single + ``'-'``. + :type name: string + :parameter version: is converted to a standard version string. Spaces + become dots, and all other non-alphanumeric characters + (except dots) become dashes, with runs of multiple + dashes condensed to a single dash. + :type version: string + :returns: directory name + :rtype: string""" + file_extension = '.dist-info' + name = name.replace('-', '_') + normalized_version = suggest_normalized_version(version) + # Because this is a lookup procedure, something will be returned even if + # it is a version that cannot be normalized + if normalized_version is None: + # Unable to achieve normality? + normalized_version = version + return '-'.join([name, normalized_version]) + file_extension + + +def get_distributions(use_egg_info=False, paths=None): + """ + Provides an iterator that looks for ``.dist-info`` directories in + ``sys.path`` and returns :class:`Distribution` instances for each one of + them. If the parameters *use_egg_info* is ``True``, then the ``.egg-info`` + files and directores are iterated as well. 
+ + :rtype: iterator of :class:`Distribution` and :class:`EggInfoDistribution` + instances + """ + if paths is None: + paths = sys.path + + if not _cache_enabled: + for dist in _yield_distributions(True, use_egg_info, paths): + yield dist + else: + _generate_cache(use_egg_info, paths) + + for dist in _cache_path.values(): + yield dist + + if use_egg_info: + for dist in _cache_path_egg.values(): + yield dist + + +def get_distribution(name, use_egg_info=False, paths=None): + """ + Scans all elements in ``sys.path`` and looks for all directories + ending with ``.dist-info``. Returns a :class:`Distribution` + corresponding to the ``.dist-info`` directory that contains the + ``METADATA`` that matches *name* for the *name* metadata field. + If no distribution exists with the given *name* and the parameter + *use_egg_info* is set to ``True``, then all files and directories ending + with ``.egg-info`` are scanned. A :class:`EggInfoDistribution` instance is + returned if one is found that has metadata that matches *name* for the + *name* metadata field. + + This function only returns the first result found, as no more than one + value is expected. If the directory is not found, ``None`` is returned. + + :rtype: :class:`Distribution` or :class:`EggInfoDistribution` or None + """ + if paths is None: + paths = sys.path + + if not _cache_enabled: + for dist in _yield_distributions(True, use_egg_info, paths): + if dist.name == name: + return dist + else: + _generate_cache(use_egg_info, paths) + + if name in _cache_name: + return _cache_name[name][0] + elif use_egg_info and name in _cache_name_egg: + return _cache_name_egg[name][0] + else: + return None + + +def obsoletes_distribution(name, version=None, use_egg_info=False): + """ + Iterates over all distributions to find which distributions obsolete + *name*. + + If a *version* is provided, it will be used to filter the results. 
+ If the argument *use_egg_info* is set to ``True``, then ``.egg-info`` + distributions will be considered as well. + + :type name: string + :type version: string + :parameter name: + """ + for dist in get_distributions(use_egg_info): + obsoleted = (dist.metadata['Obsoletes-Dist'] + + dist.metadata['Obsoletes']) + for obs in obsoleted: + o_components = obs.split(' ', 1) + if len(o_components) == 1 or version is None: + if name == o_components[0]: + yield dist + break + else: + try: + predicate = VersionPredicate(obs) + except ValueError: + raise PackagingError( + 'distribution %r has ill-formed obsoletes field: ' + '%r' % (dist.name, obs)) + if name == o_components[0] and predicate.match(version): + yield dist + break + + +def provides_distribution(name, version=None, use_egg_info=False): + """ + Iterates over all distributions to find which distributions provide *name*. + If a *version* is provided, it will be used to filter the results. Scans + all elements in ``sys.path`` and looks for all directories ending with + ``.dist-info``. Returns a :class:`Distribution` corresponding to the + ``.dist-info`` directory that contains a ``METADATA`` that matches *name* + for the name metadata. If the argument *use_egg_info* is set to ``True``, + then all files and directories ending with ``.egg-info`` are considered + as well and returns an :class:`EggInfoDistribution` instance. + + This function only returns the first result found, since no more than + one values are expected. If the directory is not found, returns ``None``. 
+ + :parameter version: a version specifier that indicates the version + required, conforming to the format in ``PEP-345`` + + :type name: string + :type version: string + """ + predicate = None + if not version is None: + try: + predicate = VersionPredicate(name + ' (' + version + ')') + except ValueError: + raise PackagingError('invalid name or version: %r, %r' % + (name, version)) + + for dist in get_distributions(use_egg_info): + provided = dist.metadata['Provides-Dist'] + dist.metadata['Provides'] + + for p in provided: + p_components = p.rsplit(' ', 1) + if len(p_components) == 1 or predicate is None: + if name == p_components[0]: + yield dist + break + else: + p_name, p_ver = p_components + if len(p_ver) < 2 or p_ver[0] != '(' or p_ver[-1] != ')': + raise PackagingError( + 'distribution %r has invalid Provides field: %r' % + (dist.name, p)) + p_ver = p_ver[1:-1] # trim off the parenthesis + if p_name == name and predicate.match(p_ver): + yield dist + break + + +def get_file_users(path): + """ + Iterates over all distributions to find out which distributions use + *path*. + + :parameter path: can be a local absolute path or a relative + ``'/'``-separated path. 
+ :type path: string + :rtype: iterator of :class:`Distribution` instances + """ + for dist in get_distributions(): + if dist.uses(path): + yield dist + + +def get_file_path(distribution_name, relative_path): + """Return the path to a resource file.""" + dist = get_distribution(distribution_name) + if dist is not None: + return dist.get_resource_path(relative_path) + raise LookupError('no distribution named %r found' % distribution_name) + + +def get_file(distribution_name, relative_path, *args, **kwargs): + """Open and return a resource file.""" + return open(get_file_path(distribution_name, relative_path), + *args, **kwargs) diff --git a/distutils2/depgraph.py b/distutils2/depgraph.py --- a/distutils2/depgraph.py +++ b/distutils2/depgraph.py @@ -1,29 +1,34 @@ -"""Analyse the relationships between the distributions in the system -and generate a dependency graph. +"""Class and functions dealing with dependencies between distributions. + +This module provides a DependencyGraph class to represent the +dependencies between distributions. Auxiliary functions can generate a +graph, find reverse dependencies, and print a graph in DOT format. """ + import sys + from StringIO import StringIO -from distutils2.errors import DistutilsError +from distutils2.errors import PackagingError from distutils2.version import VersionPredicate, IrrationalVersionError __all__ = ['DependencyGraph', 'generate_graph', 'dependent_dists', 'graph_to_dot'] -class DependencyGraph(object): +class DependencyGraph: """ Represents a dependency graph between distributions. The dependency relationships are stored in an ``adjacency_list`` that maps distributions to a list of ``(other, label)`` tuples where ``other`` - is a distribution and the edge is labelled with ``label`` (i.e. the version + is a distribution and the edge is labeled with ``label`` (i.e. the version specifier, if such was provided). 
Also, for more efficient traversal, for every distribution ``x``, a list of predecessors is kept in ``reverse_list[x]``. An edge from distribution ``a`` to distribution ``b`` means that ``a`` depends on ``b``. If any missing - depencies are found, they are stored in ``missing``, which is a dictionary - that maps distributions to a list of requirements that were not provided by - any other distributions. + dependencies are found, they are stored in ``missing``, which is a + dictionary that maps distributions to a list of requirements that were not + provided by any other distributions. """ def __init__(self): @@ -34,40 +39,40 @@ def add_distribution(self, distribution): """Add the *distribution* to the graph. - :type distribution: :class:`pkgutil.Distribution` or - :class:`pkgutil.EggInfoDistribution` + :type distribution: :class:`distutils2.database.Distribution` or + :class:`distutils2.database.EggInfoDistribution` """ - self.adjacency_list[distribution] = list() - self.reverse_list[distribution] = list() - self.missing[distribution] = list() + self.adjacency_list[distribution] = [] + self.reverse_list[distribution] = [] + self.missing[distribution] = [] def add_edge(self, x, y, label=None): """Add an edge from distribution *x* to distribution *y* with the given *label*. 
- :type x: :class:`pkgutil.Distribution` or - :class:`pkgutil.EggInfoDistribution` - :type y: :class:`pkgutil.Distribution` or - :class:`pkgutil.EggInfoDistribution` + :type x: :class:`distutils2.database.Distribution` or + :class:`distutils2.database.EggInfoDistribution` + :type y: :class:`distutils2.database.Distribution` or + :class:`distutils2.database.EggInfoDistribution` :type label: ``str`` or ``None`` """ self.adjacency_list[x].append((y, label)) # multiple edges are allowed, so be careful - if not x in self.reverse_list[y]: + if x not in self.reverse_list[y]: self.reverse_list[y].append(x) def add_missing(self, distribution, requirement): """ Add a missing *requirement* for the given *distribution*. - :type distribution: :class:`pkgutil.Distribution` or - :class:`pkgutil.EggInfoDistribution` + :type distribution: :class:`distutils2.database.Distribution` or + :class:`distutils2.database.EggInfoDistribution` :type requirement: ``str`` """ self.missing[distribution].append(requirement) def _repr_dist(self, dist): - return '%s %s' % (dist.name, dist.metadata['Version']) + return '%r %s' % (dist.name, dist.version) def repr_node(self, dist, level=1): """Prints only a subgraph""" @@ -77,7 +82,7 @@ dist = self._repr_dist(other) if label is not None: dist = '%s [%s]' % (dist, label) - output.append(' ' * level + '%s' % dist) + output.append(' ' * level + str(dist)) suboutput = self.repr_node(other, level + 1) subs = suboutput.split('\n') output.extend(subs[1:]) @@ -86,7 +91,7 @@ def __repr__(self): """Representation of the graph""" output = [] - for dist, adjs in self.adjacency_list.iteritems(): + for dist, adjs in self.adjacency_list.items(): output.append(self.repr_node(dist)) return '\n'.join(output) @@ -102,46 +107,45 @@ """ disconnected = [] - f.write("digraph dependencies {\n") - for dist, adjs in graph.adjacency_list.iteritems(): + f.write(u"digraph dependencies {\n") + for dist, adjs in graph.adjacency_list.items(): if len(adjs) == 0 and not 
skip_disconnected: disconnected.append(dist) - for (other, label) in adjs: + for other, label in adjs: if not label is None: - f.write('"%s" -> "%s" [label="%s"]\n' % + f.write(u'"%s" -> "%s" [label="%s"]\n' % (dist.name, other.name, label)) else: - f.write('"%s" -> "%s"\n' % (dist.name, other.name)) + f.write(u'"%s" -> "%s"\n' % (dist.name, other.name)) if not skip_disconnected and len(disconnected) > 0: - f.write('subgraph disconnected {\n') - f.write('label = "Disconnected"\n') - f.write('bgcolor = red\n') + f.write(u'subgraph disconnected {\n') + f.write(u'label = "Disconnected"\n') + f.write(u'bgcolor = red\n') for dist in disconnected: - f.write('"%s"' % dist.name) - f.write('\n') - f.write('}\n') - f.write('}\n') + f.write(u'"%s"' % dist.name) + f.write(u'\n') + f.write(u'}\n') + f.write(u'}\n') def generate_graph(dists): """Generates a dependency graph from the given distributions. :parameter dists: a list of distributions - :type dists: list of :class:`pkgutil.Distribution` and - :class:`pkgutil.EggInfoDistribution` instances - :rtype: an :class:`DependencyGraph` instance + :type dists: list of :class:`distutils2.database.Distribution` and + :class:`distutils2.database.EggInfoDistribution` instances + :rtype: a :class:`DependencyGraph` instance """ graph = DependencyGraph() provided = {} # maps names to lists of (version, dist) tuples - dists = list(dists) # maybe use generator_tools in future # first, build the graph and find out the provides for dist in dists: graph.add_distribution(dist) provides = (dist.metadata['Provides-Dist'] + dist.metadata['Provides'] + - ['%s (%s)' % (dist.name, dist.metadata['Version'])]) + ['%s (%s)' % (dist.name, dist.version)]) for p in provides: comps = p.strip().rsplit(" ", 1) @@ -150,10 +154,10 @@ if len(comps) == 2: version = comps[1] if len(version) < 3 or version[0] != '(' or version[-1] != ')': - raise DistutilsError('Distribution %s has ill formed' \ - 'provides field: %s' % (dist.name, p)) + raise 
PackagingError('distribution %r has ill-formed' + 'provides field: %r' % (dist.name, p)) version = version[1:-1] # trim off parenthesis - if not name in provided: + if name not in provided: provided[name] = [] provided[name].append((version, dist)) @@ -170,7 +174,7 @@ name = predicate.name - if not name in provided: + if name not in provided: graph.add_missing(dist, req) else: matched = False @@ -200,8 +204,9 @@ :param dists: a list of distributions :param dist: a distribution, member of *dists* for which we are interested """ - if not dist in dists: - raise ValueError('The given distribution is not a member of the list') + if dist not in dists: + raise ValueError('given distribution %r is not a member of the list' % + dist.name) graph = generate_graph(dists) dep = [dist] # dependent distributions @@ -211,7 +216,7 @@ node = fringe.pop() dep.append(node) for prev in graph.reverse_list[node]: - if not prev in dep: + if prev not in dep: fringe.append(prev) dep.pop(0) # remove dist from dep, was there to prevent infinite loops @@ -219,7 +224,7 @@ def main(): - from distutils2._backport.pkgutil import get_distributions + from distutils2.database import get_distributions tempout = StringIO() try: old = sys.stderr @@ -229,20 +234,23 @@ graph = generate_graph(dists) finally: sys.stderr = old - except Exception, e: + except Exception: + e = sys.exc_info()[1] tempout.seek(0) tempout = tempout.read() - print('Could not generate the graph\n%s\n%s\n' % (tempout, str(e))) + print(u'Could not generate the graph') + print(tempout) + print(e) sys.exit(1) - for dist, reqs in graph.missing.iteritems(): + for dist, reqs in graph.missing.items(): if len(reqs) > 0: - print("Warning: Missing dependencies for %s: %s" % (dist.name, - ", ".join(reqs))) + print(u"Warning: Missing dependencies for %r:" % dist.name, + ", ".join(reqs)) # XXX replace with argparse if len(sys.argv) == 1: - print('Dependency graph:') - print(' ' + repr(graph).replace('\n', '\n ')) + print(u'Dependency graph:') + 
print(u' ', repr(graph).replace(u'\n', u'\n ')) sys.exit(0) elif len(sys.argv) > 1 and sys.argv[1] in ('-d', '--dot'): if len(sys.argv) > 2: @@ -250,15 +258,12 @@ else: filename = 'depgraph.dot' - f = open(filename, 'w') - try: + with open(filename, 'w') as f: graph_to_dot(graph, f, True) - finally: - f.close() tempout.seek(0) tempout = tempout.read() print(tempout) - print('Dot file written at "%s"' % filename) + print('Dot file written at %r' % filename) sys.exit(0) else: print('Supported option: -d [filename]') diff --git a/distutils2/dist.py b/distutils2/dist.py --- a/distutils2/dist.py +++ b/distutils2/dist.py @@ -1,17 +1,11 @@ -"""distutils.dist - -Provides the Distribution class, which represents the module distribution -being built/installed/distributed. -""" - +"""Class representing the distribution being built/installed/etc.""" import os import re -import warnings -import logging +import sys -from distutils2.errors import (DistutilsOptionError, DistutilsArgError, - DistutilsModuleError, DistutilsClassError) +from distutils2.errors import (PackagingOptionError, PackagingArgError, + PackagingModuleError, PackagingClassError) from distutils2.fancy_getopt import FancyGetopt from distutils2.util import strtobool, resolve_name from distutils2 import logger @@ -19,7 +13,7 @@ from distutils2.config import Config from distutils2.command import get_command_class, STANDARD_COMMANDS -# Regex to define acceptable Distutils command names. This is not *quite* +# Regex to define acceptable Packaging command names. This is not *quite* # the same as a Python NAME -- I don't allow leading underscores. The fact # that they're very similar is no coincidence; the default naming scheme is # to look for a Python module named after the command. @@ -32,14 +26,16 @@ or: %(script)s cmd --help """ + def gen_usage(script_name): script = os.path.basename(script_name) return USAGE % {'script': script} + class Distribution(object): - """The core of the Distutils. 
Most of the work hiding behind 'setup' + """The core of the Packaging. Most of the work hiding behind 'setup' is really done within a Distribution instance, which farms the work out - to the Distutils commands specified on the command line. + to the Packaging commands specified on the command line. Setup scripts will almost never instantiate Distribution directly, unless the 'setup()' function is totally inadequate to their needs. @@ -52,33 +48,31 @@ # 'global_options' describes the command-line options that may be # supplied to the setup script prior to any actual commands. - # Eg. "./setup.py -n" or "./setup.py --quiet" both take advantage of + # Eg. "pysetup -n" or "pysetup --dry-run" both take advantage of # these global options. This list should be kept to a bare minimum, # since every global option is also valid as a command option -- and we # don't want to pollute the commands with too many options that they # have minimal control over. - # The fourth entry for verbose means that it can be repeated. - global_options = [('verbose', 'v', "run verbosely (default)", 1), - ('quiet', 'q', "run quietly (turns verbosity off)"), - ('dry-run', 'n', "don't actually do anything"), - ('help', 'h', "show detailed help message"), - ('no-user-cfg', None, - 'ignore pydistutils.cfg in your home directory'), + global_options = [ + ('dry-run', 'n', "don't actually do anything"), + ('help', 'h', "show detailed help message"), + ('no-user-cfg', None, 'ignore pydistutils.cfg in your home directory'), ] # 'common_usage' is a short (2-3 line) string describing the common # usage of the setup script. 
- common_usage = """\ + common_usage = u"""\ Common commands: (see '--help-commands' for more) - setup.py build will build the package underneath 'build/' - setup.py install will install the package + pysetup run build will build the package underneath 'build/' + pysetup run install will install the package """ # options that are not propagated to the commands display_options = [ ('help-commands', None, "list all available commands"), + # XXX this is obsoleted by the pysetup metadata action ('name', None, "print package name"), ('version', 'V', @@ -127,7 +121,7 @@ display_option_names = [x[0].replace('-', '_') for x in display_options] # negative options are options that exclude other options - negative_opt = {'quiet': 'verbose'} + negative_opt = {} # -- Creation/initialization methods ------------------------------- def __init__(self, attrs=None): @@ -142,11 +136,10 @@ """ # Default values for our command-line options - self.verbose = 1 - self.dry_run = 0 - self.help = 0 + self.dry_run = False + self.help = False for attr in self.display_option_names: - setattr(self, attr, 0) + setattr(self, attr, False) # Store the configuration self.config = Config(self) @@ -237,21 +230,21 @@ options = attrs.get('options') if options is not None: del attrs['options'] - for (command, cmd_options) in options.iteritems(): + for command, cmd_options in options.items(): opt_dict = self.get_option_dict(command) - for (opt, val) in cmd_options.iteritems(): + for opt, val in cmd_options.items(): opt_dict[opt] = ("setup script", val) # Now work on the rest of the attributes. Any attribute that's # not already defined is invalid! 
- for key, val in attrs.iteritems(): + for key, val in attrs.items(): if self.metadata.is_metadata_field(key): self.metadata[key] = val elif hasattr(self, key): setattr(self, key, val) else: - msg = "Unknown distribution option: %r" % key - warnings.warn(msg) + logger.warning( + 'unknown argument given to Distribution: %r', key) # no-user-cfg is handled before other command line args # because other args override the config files, and this @@ -292,24 +285,23 @@ commands = sorted(self.command_options) if header is not None: - self.announce(indent + header) + logger.info(indent + header) indent = indent + " " if not commands: - self.announce(indent + "no commands known yet") + logger.info(indent + "no commands known yet") return for cmd_name in commands: opt_dict = self.command_options.get(cmd_name) if opt_dict is None: - self.announce(indent + - "no option dict for %r command" % cmd_name) + logger.info(indent + "no option dict for %r command", + cmd_name) else: - self.announce(indent + - "option dict for %r command:" % cmd_name) + logger.info(indent + "option dict for %r command:", cmd_name) out = pformat(opt_dict) for line in out.split('\n'): - self.announce(indent + " " + line) + logger.info(indent + " " + line) # -- Config file finding/parsing methods --------------------------- # XXX to be removed @@ -326,15 +318,15 @@ 'script_args' instance attribute (which defaults to 'sys.argv[1:]' -- see 'setup()' in run.py). This list is first processed for "global options" -- options that set attributes of the Distribution - instance. Then, it is alternately scanned for Distutils commands + instance. Then, it is alternately scanned for Packaging commands and options for that command. Each new command terminates the options for the previous command. The allowed options for a command are determined by the 'user_options' attribute of the command class -- thus, we have to be able to load command classes in order to parse the command line. 
Any error in that 'options' - attribute raises DistutilsGetoptError; any error on the - command line raises DistutilsArgError. If no Distutils commands - were found on the command line, raises DistutilsArgError. Return + attribute raises PackagingGetoptError; any error on the + command line raises PackagingArgError. If no Packaging commands + were found on the command line, raises PackagingArgError. Return true if command line was successfully parsed and we should carry on with executing commands; false if no errors but we shouldn't execute commands (currently, this only happens if user asks for @@ -360,14 +352,6 @@ args = parser.getopt(args=self.script_args, object=self) option_order = parser.get_option_order() - handler = logging.StreamHandler() - logger.addHandler(handler) - - if self.verbose: - handler.setLevel(logging.DEBUG) - else: - handler.setLevel(logging.INFO) - # for display options we return immediately if self.handle_display_options(option_order): return @@ -378,8 +362,8 @@ return # Handle the cases of --help as a "global" option, ie. - # "setup.py --help" and "setup.py --help command ...". For the - # former, we show global options (--verbose, --dry-run, etc.) + # "pysetup run --help" and "pysetup run --help command ...". For the + # former, we show global options (--dry-run, etc.) # and display-only options (--name, --version, etc.); for the # latter, we omit the display-only options and show help for # each command listed on the command line. @@ -419,24 +403,24 @@ # it takes. try: cmd_class = get_command_class(command) - except DistutilsModuleError, msg: - raise DistutilsArgError(msg) + except PackagingModuleError: + raise PackagingArgError(sys.exc_info()[1]) - # XXX We want to push this in distutils.command + # XXX We want to push this in distutils2.command # # Require that the command class be derived from Command -- want # to be sure that the basic "command" interface is implemented. 
for meth in ('initialize_options', 'finalize_options', 'run'): if hasattr(cmd_class, meth): continue - raise DistutilsClassError( + raise PackagingClassError( 'command %r must implement %r' % (cmd_class, meth)) # Also make sure that the command object provides a list of its # known options. if not (hasattr(cmd_class, 'user_options') and isinstance(cmd_class.user_options, list)): - raise DistutilsClassError( + raise PackagingClassError( "command class %s must provide " "'user_options' attribute (a list of tuples)" % cmd_class) @@ -451,7 +435,7 @@ # format (tuple of four) so we need to preprocess them here. if (hasattr(cmd_class, 'help_options') and isinstance(cmd_class.help_options, list)): - help_options = fix_help_options(cmd_class.help_options) + help_options = cmd_class.help_options[:] else: help_options = [] @@ -461,21 +445,22 @@ cmd_class.user_options + help_options) parser.set_negative_aliases(negative_opt) - (args, opts) = parser.getopt(args[1:]) + args, opts = parser.getopt(args[1:]) if hasattr(opts, 'help') and opts.help: - self._show_help(parser, display_options=0, commands=[cmd_class]) + self._show_help(parser, display_options=False, + commands=[cmd_class]) return if (hasattr(cmd_class, 'help_options') and isinstance(cmd_class.help_options, list)): - help_option_found = 0 - for (help_option, short, desc, func) in cmd_class.help_options: + help_option_found = False + for help_option, short, desc, func in cmd_class.help_options: if hasattr(opts, help_option.replace('-', '_')): - help_option_found = 1 + help_option_found = True if hasattr(func, '__call__'): func() else: - raise DistutilsClassError( + raise PackagingClassError( "invalid help function %r for help option %r: " "must be a callable object (function, etc.)" % (func, help_option)) @@ -486,7 +471,7 @@ # Put the options from the command line into their official # holding pen, the 'command_options' dictionary. 
opt_dict = self.get_option_dict(command) - for (name, value) in vars(opts).iteritems(): + for name, value in vars(opts).items(): opt_dict[name] = ("command line", value) return args @@ -502,7 +487,7 @@ else: self.convert_2to3_doctests = [] - def _show_help(self, parser, global_options=1, display_options=1, + def _show_help(self, parser, global_options=True, display_options=True, commands=[]): """Show help for the setup script command line in the form of several lists of command-line options. 'parser' should be a @@ -511,7 +496,7 @@ generate the correct help text. If 'global_options' is true, lists the global options: - --verbose, --dry-run, etc. If 'display_options' is true, lists + --dry-run, etc. If 'display_options' is true, lists the "display-only" options: --name, --version, etc. Finally, lists per-command help for every command name or command class in 'commands'. @@ -526,14 +511,14 @@ options = self.global_options parser.set_option_table(options) parser.print_help(self.common_usage + "\nGlobal options:") - print('') + print(u'') if display_options: parser.set_option_table(self.display_options) parser.print_help( "Information display options (just display " + "information, ignore any commands)") - print('') + print(u'') for command in self.commands: if isinstance(command, type) and issubclass(command, Command): @@ -542,12 +527,11 @@ cls = get_command_class(command) if (hasattr(cls, 'help_options') and isinstance(cls.help_options, list)): - parser.set_option_table(cls.user_options + - fix_help_options(cls.help_options)) + parser.set_option_table(cls.user_options + cls.help_options) else: parser.set_option_table(cls.user_options) parser.print_help("Options for %r command:" % cls.__name__) - print('') + print(u'') print(gen_usage(self.script_name)) @@ -562,30 +546,30 @@ # we ignore "foo bar"). 
if self.help_commands: self.print_commands() - print('') + print() print(gen_usage(self.script_name)) return 1 # If user supplied any of the "display metadata" options, then # display that metadata in the order in which the user supplied the # metadata options. - any_display_options = 0 - is_display_option = {} + any_display_options = False + is_display_option = set() for option in self.display_options: - is_display_option[option[0]] = 1 + is_display_option.add(option[0]) for opt, val in option_order: - if val and is_display_option.get(opt): + if val and opt in is_display_option: opt = opt.replace('-', '_') value = self.metadata[opt] - if opt in ['keywords', 'platform']: + if opt in ('keywords', 'platform'): print(','.join(value)) elif opt in ('classifier', 'provides', 'requires', 'obsoletes'): print('\n'.join(value)) else: print(value) - any_display_options = 1 + any_display_options = True return any_display_options @@ -630,14 +614,14 @@ "Standard commands", max_length) if extra_commands: - print + print() self.print_command_list(extra_commands, "Extra commands", max_length) # -- Command class/object methods ---------------------------------- - def get_command_obj(self, command, create=1): + def get_command_obj(self, command, create=True): """Return the command object for 'command'. 
Normally this object is cached on a previous call to 'get_command_obj()'; if no command object for 'command' is in the cache, then we either create and @@ -660,7 +644,6 @@ options = self.command_options.get(command) if options: self._set_command_options(cmd_obj, options) - return cmd_obj def _set_command_options(self, command_obj, option_dict=None): @@ -678,7 +661,7 @@ logger.debug(" setting options for %r command:", command_name) - for (option, (source, value)) in option_dict.iteritems(): + for option, (source, value) in option_dict.items(): logger.debug(" %s = %s (from %s)", option, value, source) try: bool_opts = [x.replace('-', '_') @@ -691,7 +674,7 @@ neg_opt = {} try: - is_string = isinstance(value, str) + is_string = isinstance(value, basestring) if option in neg_opt and is_string: setattr(command_obj, neg_opt[option], not strtobool(value)) elif option in bool_opts and is_string: @@ -699,13 +682,13 @@ elif hasattr(command_obj, option): setattr(command_obj, option, value) else: - raise DistutilsOptionError( + raise PackagingOptionError( "error in %s: command %r has no such option %r" % (source, command_name, option)) - except ValueError, msg: - raise DistutilsOptionError(msg) + except ValueError: + raise PackagingOptionError(sys.exc_info()[1]) - def get_reinitialized_command(self, command, reinit_subcommands=0): + def get_reinitialized_command(self, command, reinit_subcommands=False): """Reinitializes a command to the state it was in when first returned by 'get_command_obj()': ie., initialized but not yet finalized. 
This provides the opportunity to sneak option @@ -734,8 +717,8 @@ if not command.finalized: return command command.initialize_options() - command.finalized = 0 self.have_run[command_name] = 0 + command.finalized = False self._set_command_options(command) if reinit_subcommands: @@ -746,9 +729,6 @@ # -- Methods that operate on the Distribution ---------------------- - def announce(self, msg, level=logging.INFO): - logger.log(level, msg) - def run_commands(self): """Run each command that was seen on the setup script command line. Uses the list of commands found and cache of command objects @@ -796,17 +776,17 @@ if hooks is None: return - for hook in hooks.itervalues(): + for hook in hooks.values(): if isinstance(hook, basestring): try: hook_obj = resolve_name(hook) - except ImportError, e: - raise DistutilsModuleError(e) + except ImportError: + raise PackagingModuleError(sys.exc_info()[1]) else: hook_obj = hook if not hasattr(hook_obj, '__call__'): - raise DistutilsOptionError('hook %r is not callable' % hook) + raise PackagingOptionError('hook %r is not callable' % hook) logger.info('running %s %s for command %s', hook_kind, hook, cmd_obj.get_command_name()) @@ -838,15 +818,3 @@ return (self.has_pure_modules() and not self.has_ext_modules() and not self.has_c_libraries()) - - -# XXX keep for compat or remove? - -def fix_help_options(options): - """Convert a 4-tuple 'help_options' list as found in various command - classes to the 3-tuple form required by FancyGetopt. - """ - new_options = [] - for help_tuple in options: - new_options.append(help_tuple[0:3]) - return new_options diff --git a/distutils2/errors.py b/distutils2/errors.py --- a/distutils2/errors.py +++ b/distutils2/errors.py @@ -1,84 +1,82 @@ -"""distutils.errors +"""Exceptions used throughout the package. -Provides exceptions used by the Distutils modules. 
Note that Distutils -modules may raise standard exceptions; in particular, SystemExit is -usually raised for errors that are obviously the end-user's fault -(eg. bad command-line arguments). +Submodules of distutils2 may raise exceptions defined in this module as +well as standard exceptions; in particular, SystemExit is usually raised +for errors that are obviously the end-user's fault (e.g. bad +command-line arguments). +""" -This module is safe to use in "from ... import *" mode; it only exports -symbols whose names start with "Distutils" and end with "Error".""" +class PackagingError(Exception): + """The root of all Packaging evil.""" -class DistutilsError(Exception): - """The root of all Distutils evil.""" - -class DistutilsModuleError(DistutilsError): +class PackagingModuleError(PackagingError): """Unable to load an expected module, or to find an expected class within some module (in particular, command modules and classes).""" -class DistutilsClassError(DistutilsError): +class PackagingClassError(PackagingError): """Some command class (or possibly distribution class, if anyone feels a need to subclass Distribution) is found not to be holding up its end of the bargain, ie. implementing some part of the "command "interface.""" -class DistutilsGetoptError(DistutilsError): +class PackagingGetoptError(PackagingError): """The option table provided to 'fancy_getopt()' is bogus.""" -class DistutilsArgError(DistutilsError): +class PackagingArgError(PackagingError): """Raised by fancy_getopt in response to getopt.error -- ie. an error in the command line usage.""" -class DistutilsFileError(DistutilsError): +class PackagingFileError(PackagingError): """Any problems in the filesystem: expected file not found, etc. 
Typically this is for problems that we detect before IOError or OSError could be raised.""" -class DistutilsOptionError(DistutilsError): +class PackagingOptionError(PackagingError): """Syntactic/semantic errors in command options, such as use of mutually conflicting options, or inconsistent options, badly-spelled values, etc. No distinction is made between option values originating in the setup script, the command line, config files, or what-have-you -- but if we *know* something originated in - the setup script, we'll raise DistutilsSetupError instead.""" + the setup script, we'll raise PackagingSetupError instead.""" -class DistutilsSetupError(DistutilsError): +class PackagingSetupError(PackagingError): """For errors that can be definitely blamed on the setup script, such as invalid keyword arguments to 'setup()'.""" -class DistutilsPlatformError(DistutilsError): +class PackagingPlatformError(PackagingError): """We don't know how to do something on the current platform (but we do know how to do it on some platform) -- eg. 
trying to compile C files on a platform not supported by a CCompiler subclass.""" -class DistutilsExecError(DistutilsError): +class PackagingExecError(PackagingError): """Any problems executing an external program (such as the C compiler, when compiling C files).""" -class DistutilsInternalError(DistutilsError): +class PackagingInternalError(PackagingError): """Internal inconsistencies or impossibilities (obviously, this should never be seen if the code is working!).""" -class DistutilsTemplateError(DistutilsError): +class PackagingTemplateError(PackagingError): """Syntax error in a file list template.""" -class DistutilsByteCompileError(DistutilsError): +class PackagingByteCompileError(PackagingError): """Byte compile error.""" -class DistutilsIndexError(DistutilsError): +class PackagingPyPIError(PackagingError): """Any problem occuring during using the indexes.""" @@ -109,15 +107,15 @@ """Attempt to process an unknown file type.""" -class MetadataMissingError(DistutilsError): +class MetadataMissingError(PackagingError): """A required metadata is missing""" -class MetadataConflictError(DistutilsError): +class MetadataConflictError(PackagingError): """Attempt to read or write metadata fields that are conflictual.""" -class MetadataUnrecognizedVersionError(DistutilsError): +class MetadataUnrecognizedVersionError(PackagingError): """Unknown metadata version number.""" diff --git a/distutils2/fancy_getopt.py b/distutils2/fancy_getopt.py --- a/distutils2/fancy_getopt.py +++ b/distutils2/fancy_getopt.py @@ -1,22 +1,23 @@ -"""distutils.fancy_getopt +"""Command line parsing machinery. 
-Wrapper around the standard getopt module that provides the following -additional features: +The FancyGetopt class is a Wrapper around the getopt module that +provides the following additional features: * short and long options are tied together * options have help strings, so fancy_getopt could potentially create a complete usage summary - * options set attributes of a passed-in object + * options set attributes of a passed-in object. + +It is used under the hood by the command classes. Do not use directly. """ +import getopt +import re +import sys +import textwrap -import sys -import string -import re -import getopt -import textwrap -from distutils2.errors import DistutilsGetoptError, DistutilsArgError +from distutils2.errors import PackagingGetoptError, PackagingArgError -# Much like command_re in distutils.core, this is close to but not quite +# Much like command_re in distutils2.core, this is close to but not quite # the same as a Python NAME -- except, in the spirit of most GNU # utilities, we use '-' in place of '_'. (The spirit of LISP lives on!) # The similarities to NAME are again not a coincidence... @@ -38,6 +39,7 @@ --quiet is the "negative alias" of --verbose, then "--quiet" on the command line sets 'verbose' to false """ + def __init__(self, option_table=None): # The option table is (currently) a list of tuples. 
The @@ -90,7 +92,7 @@ def add_option(self, long_option, short_option=None, help_string=None): if long_option in self.option_index: - raise DistutilsGetoptError( + raise PackagingGetoptError( "option conflict: already an option '%s'" % long_option) else: option = (long_option, short_option, help_string) @@ -104,13 +106,13 @@ def _check_alias_dict(self, aliases, what): assert isinstance(aliases, dict) - for (alias, opt) in aliases.iteritems(): + for alias, opt in aliases.items(): if alias not in self.option_index: - raise DistutilsGetoptError( + raise PackagingGetoptError( ("invalid %s '%s': " "option '%s' not defined") % (what, alias, alias)) if opt not in self.option_index: - raise DistutilsGetoptError( + raise PackagingGetoptError( ("invalid %s '%s': " "aliased option '%s' not defined") % (what, alias, opt)) @@ -139,76 +141,76 @@ for option in self.option_table: if len(option) == 3: - long, short, help = option + longopt, short, help = option repeat = 0 elif len(option) == 4: - long, short, help, repeat = option + longopt, short, help, repeat = option else: # the option table is part of the code, so simply # assert that it is correct raise ValueError("invalid option tuple: %r" % option) # Type- and value-check the option names - if not isinstance(long, str) or len(long) < 2: - raise DistutilsGetoptError( + if not isinstance(longopt, basestring) or len(longopt) < 2: + raise PackagingGetoptError( ("invalid long option '%s': " - "must be a string of length >= 2") % long) + "must be a string of length >= 2") % longopt) if (not ((short is None) or - (isinstance(short, str) and len(short) == 1))): - raise DistutilsGetoptError( + (isinstance(short, basestring) and len(short) == 1))): + raise PackagingGetoptError( ("invalid short option '%s': " - "must a single character or None") % short) + "must be a single character or None") % short) - self.repeat[long] = repeat - self.long_opts.append(long) + self.repeat[longopt] = repeat + self.long_opts.append(longopt) - if 
long[-1] == '=': # option takes an argument? + if longopt[-1] == '=': # option takes an argument? if short: short = short + ':' - long = long[0:-1] - self.takes_arg[long] = 1 + longopt = longopt[0:-1] + self.takes_arg[longopt] = 1 else: # Is option is a "negative alias" for some other option (eg. # "quiet" == "!verbose")? - alias_to = self.negative_alias.get(long) + alias_to = self.negative_alias.get(longopt) if alias_to is not None: if self.takes_arg[alias_to]: - raise DistutilsGetoptError( + raise PackagingGetoptError( ("invalid negative alias '%s': " "aliased option '%s' takes a value") % \ - (long, alias_to)) + (longopt, alias_to)) - self.long_opts[-1] = long # XXX redundant?! - self.takes_arg[long] = 0 + self.long_opts[-1] = longopt # XXX redundant?! + self.takes_arg[longopt] = 0 else: - self.takes_arg[long] = 0 + self.takes_arg[longopt] = 0 # If this is an alias option, make sure its "takes arg" flag is # the same as the option it's aliased to. - alias_to = self.alias.get(long) + alias_to = self.alias.get(longopt) if alias_to is not None: - if self.takes_arg[long] != self.takes_arg[alias_to]: - raise DistutilsGetoptError( + if self.takes_arg[longopt] != self.takes_arg[alias_to]: + raise PackagingGetoptError( ("invalid alias '%s': inconsistent with " "aliased option '%s' (one of them takes a value, " - "the other doesn't") % (long, alias_to)) + "the other doesn't") % (longopt, alias_to)) # Now enforce some bondage on the long option name, so we can # later translate it to an attribute name on some object. Have # to do this a bit late to make sure we've removed any trailing # '='. 
- if not longopt_re.match(long): - raise DistutilsGetoptError( + if not longopt_re.match(longopt): + raise PackagingGetoptError( ("invalid long option name '%s' " + - "(must be letters, numbers, hyphens only") % long) + "(must be letters, numbers, hyphens only") % longopt) - self.attr_name[long] = long.replace('-', '_') + self.attr_name[longopt] = longopt.replace('-', '_') if short: self.short_opts.append(short) - self.short2long[short[0]] = long + self.short2long[short[0]] = longopt def getopt(self, args=None, object=None): """Parse command-line options in args. Store as attributes on object. @@ -231,11 +233,12 @@ self._grok_option_table() - short_opts = string.join(self.short_opts) + short_opts = ' '.join(self.short_opts) + try: opts, args = getopt.getopt(args, short_opts, self.long_opts) - except getopt.error, msg: - raise DistutilsArgError(msg) + except getopt.error: + raise PackagingArgError(sys.exc_info()[1]) for opt, val in opts: if len(opt) == 2 and opt[0] == '-': # it's a short option @@ -278,6 +281,8 @@ """ if self.option_order is None: raise RuntimeError("'getopt()' hasn't been called yet") + else: + return self.option_order return self.option_order @@ -291,10 +296,10 @@ # First pass: determine maximum length of long option names max_opt = 0 for option in self.option_table: - long = option[0] + longopt = option[0] short = option[1] - l = len(long) - if long[-1] == '=': + l = len(longopt) + if longopt[-1] == '=': l = l - 1 if short is not None: l = l + 5 # " (-x)" where short == 'x' @@ -334,22 +339,20 @@ lines = ['Option summary:'] for option in self.option_table: - long, short, help = option[:3] + longopt, short, help = option[:3] text = textwrap.wrap(help, text_width) - if long[-1] == '=': - long = long[0:-1] # Case 1: no short option at all (makes life easy) if short is None: if text: - lines.append(" --%-*s %s" % (max_opt, long, text[0])) + lines.append(" --%-*s %s" % (max_opt, longopt, text[0])) else: - lines.append(" --%-*s " % (max_opt, long)) + 
lines.append(" --%-*s " % (max_opt, longopt)) # Case 2: we have a short option, so we have to include it # just after the long option else: - opt_names = "%s (-%s)" % (long, short) + opt_names = "%s (-%s)" % (longopt, short) if text: lines.append(" --%-*s %s" % (max_opt, opt_names, text[0])) @@ -365,7 +368,7 @@ if file is None: file = sys.stdout for line in self.generate_help(header): - file.write(line + "\n") + file.write(line + u"\n") def fancy_getopt(options, negative_opt, object, args): @@ -374,71 +377,6 @@ return parser.getopt(args, object) -if 'maketrans' in str.__dict__ : - # Python 3.2+ - WS_TRANS = str.maketrans(string.whitespace, ' ' * len(string.whitespace)) -else : - # Depreciated syntax - WS_TRANS = string.maketrans(string.whitespace, ' ' * len(string.whitespace)) - - -def wrap_text(text, width): - """wrap_text(text : string, width : int) -> [string] - - Split 'text' into multiple lines of no more than 'width' characters - each, and return the list of strings that results. - """ - - if text is None: - return [] - if len(text) <= width: - return [text] - - text = string.expandtabs(text) - text = string.translate(text, WS_TRANS) - chunks = re.split(r'( +|-+)', text) - chunks = filter(None, chunks) # ' - ' results in empty strings - lines = [] - - while chunks: - - cur_line = [] # list of chunks (to-be-joined) - cur_len = 0 # length of current line - - while chunks: - l = len(chunks[0]) - if cur_len + l <= width: # can squeeze (at least) this chunk in - cur_line.append(chunks[0]) - del chunks[0] - cur_len = cur_len + l - else: # this line is full - # drop last chunk if all space - if cur_line and cur_line[-1][0] == ' ': - del cur_line[-1] - break - - if chunks: # any chunks left to process? 
- - # if the current line is still empty, then we had a single - # chunk that's too big too fit on a line -- so we break - # down and break it up at the line width - if cur_len == 0: - cur_line.append(chunks[0][0:width]) - chunks[0] = chunks[0][width:] - - # all-whitespace chunks at the end of a line can be discarded - # (and we know from the re.split above that if a chunk has - # *any* whitespace, it is *all* whitespace) - if chunks[0][0] == ' ': - del chunks[0] - - # and store this line in the list-of-all-lines -- as a single - # string, of course! - lines.append(string.join(cur_line, '')) - - return lines - - class OptionDummy(object): """Dummy class just used as a place to hold command-line option values as instance attributes.""" diff --git a/distutils2/install.py b/distutils2/install.py --- a/distutils2/install.py +++ b/distutils2/install.py @@ -1,34 +1,37 @@ -"""Provides installations scripts. +"""Building blocks for installers. -The goal of this script is to install a release from the indexes (eg. -PyPI), including the dependencies of the releases if needed. +When used as a script, this module installs a release thanks to info +obtained from an index (e.g. PyPI), with dependencies. -It uses the work made in pkgutil and by the index crawlers to browse the -installed distributions, and rely on the instalation commands to install. +This is a higher-level module built on distutils2.database and +distutils2.pypi. 
""" -import shutil import os import sys import stat import errno -import itertools +import shutil import logging import tempfile +from sysconfig import get_config_var, get_path, is_python_build from distutils2 import logger -from distutils2._backport.pkgutil import get_distributions -from distutils2._backport.pkgutil import get_distribution -from distutils2._backport.sysconfig import get_config_var +from distutils2.dist import Distribution +from distutils2.util import (_is_archive_file, ask, get_install_method, + egginfo_to_distinfo, unpack_archive) +from distutils2.pypi import wrapper +from distutils2.version import get_version_predicate +from distutils2.database import get_distributions, get_distribution from distutils2.depgraph import generate_graph -from distutils2.index import wrapper -from distutils2.index.errors import ProjectNotFound, ReleaseNotFound -from distutils2.errors import (DistutilsError, InstallationException, - InstallationConflict) -from distutils2.version import get_version_predicate + +from distutils2.errors import (PackagingError, InstallationException, + InstallationConflict, CCompilerError) +from distutils2.pypi.errors import ProjectNotFound, ReleaseNotFound +from distutils2 import database __all__ = ['install_dists', 'install_from_infos', 'get_infos', 'remove', - 'install'] + 'install', 'install_local_project'] def _move_files(files, destination): @@ -40,42 +43,53 @@ :param files: a list of files to move. :param destination: the destination directory to put on the files. """ + for old in files: - # not using os.path.join() because basename() might not be - # unique in destination - new = "%s%s" % (destination, old) - + filename = os.path.split(old)[-1] + new = os.path.join(destination, filename) # try to make the paths. 
try: os.makedirs(os.path.dirname(new)) - except OSError, e: - if e.errno == errno.EEXIST: - pass - else: - raise e + except OSError: + e = sys.exc_info()[1] + if e.errno != errno.EEXIST: + raise os.rename(old, new) yield old, new -def _run_d1_install(archive_dir, path): +def _run_distutils_install(path): # backward compat: using setuptools or plain-distutils - cmd = '%s setup.py install --root=%s --record=%s' - setup_py = os.path.join(archive_dir, 'setup.py') - if 'setuptools' in open(setup_py).read(): - cmd += ' --single-version-externally-managed' - - # how to place this file in the egg-info dir - # for non-distutils2 projects ? - record_file = os.path.join(archive_dir, 'RECORD') - os.system(cmd % (sys.executable, path, record_file)) + cmd = '%s setup.py install --record=%s' + record_file = os.path.join(path, 'RECORD') + os.system(cmd % (sys.executable, record_file)) if not os.path.exists(record_file): raise ValueError('failed to install') - return open(record_file).read().split('\n') + else: + egginfo_to_distinfo(record_file, remove_egginfo=True) -def _run_d2_install(archive_dir, path): - # using our own install command - raise NotImplementedError() +def _run_setuptools_install(path): + cmd = '%s setup.py install --record=%s --single-version-externally-managed' + record_file = os.path.join(path, 'RECORD') + + os.system(cmd % (sys.executable, record_file)) + if not os.path.exists(record_file): + raise ValueError('failed to install') + else: + egginfo_to_distinfo(record_file, remove_egginfo=True) + + +def _run_packaging_install(path): + # XXX check for a valid setup.cfg? 
+ dist = Distribution() + dist.parse_config_files() + try: + dist.run_command('install_dist') + name = dist.metadata['Name'] + return database.get_distribution(name) is not None + except (IOError, os.error, PackagingError, CCompilerError): + raise ValueError("Failed to install, " + str(sys.exc_info()[1])) def _install_dist(dist, path): @@ -87,64 +101,105 @@ * copy the files in "path" * determine if the distribution is distutils2 or distutils1. """ - where = dist.unpack(path) + where = dist.unpack() - # get into the dir - archive_dir = None - for item in os.listdir(where): - fullpath = os.path.join(where, item) - if os.path.isdir(fullpath): - archive_dir = fullpath - break - - if archive_dir is None: + if where is None: raise ValueError('Cannot locate the unpacked archive') - # install + return _run_install_from_archive(where) + + +def install_local_project(path): + """Install a distribution from a source directory. + + If the source directory contains a setup.py install using distutils1. + If a setup.cfg is found, install using the install_dist command. + + Returns True on success, False on Failure. 
+ """ + path = os.path.abspath(path) + if os.path.isdir(path): + logger.info('Installing from source directory: %r', path) + return _run_install_from_dir(path) + elif _is_archive_file(path): + logger.info('Installing from archive: %r', path) + _unpacked_dir = tempfile.mkdtemp() + try: + unpack_archive(path, _unpacked_dir) + return _run_install_from_archive(_unpacked_dir) + finally: + shutil.rmtree(_unpacked_dir) + else: + logger.warning('No project to install.') + return False + + +def _run_install_from_archive(source_dir): + # XXX need a better way + for item in os.listdir(source_dir): + fullpath = os.path.join(source_dir, item) + if os.path.isdir(fullpath): + source_dir = fullpath + break + return _run_install_from_dir(source_dir) + + +install_methods = { + 'distutils2': _run_packaging_install, + 'setuptools': _run_setuptools_install, + 'distutils': _run_distutils_install} + + +def _run_install_from_dir(source_dir): old_dir = os.getcwd() - os.chdir(archive_dir) + os.chdir(source_dir) + install_method = get_install_method(source_dir) + func = install_methods[install_method] try: - # distutils2 or distutils1 ? - if 'setup.py' in os.listdir(archive_dir): - return _run_d1_install(archive_dir, path) - else: - return _run_d2_install(archive_dir, path) + func = install_methods[install_method] + try: + func(source_dir) + return True + except ValueError: + # failed to install + logger.info(str(sys.exc_info()[1])) + return False finally: os.chdir(old_dir) -def install_dists(dists, path, paths=sys.path): +def install_dists(dists, path, paths=None): """Install all distributions provided in dists, with the given prefix. If an error occurs while installing one of the distributions, uninstall all the installed distribution (in the context if this function). - Return a list of installed files. + Return a list of installed dists. 
:param dists: distributions to install :param path: base path to install distribution in :param paths: list of paths (defaults to sys.path) to look for info """ - installed_dists, installed_files = [], [] + installed_dists = [] for dist in dists: - logger.info('installing %s %s', dist.name, dist.version) + logger.info('Installing %r %s...', dist.name, dist.version) try: - installed_files.extend(_install_dist(dist, path)) + _install_dist(dist, path) installed_dists.append(dist) - except Exception, e: - logger.info('failed: %s', e) + except Exception: + logger.info('Failed: %s', sys.exc_info()[1]) # reverting for installed_dist in installed_dists: - _remove_dist(installed_dist, paths) + logger.info('Reverting %r', installed_dist) + remove(installed_dist.name, paths) raise e - - return installed_files + return installed_dists def install_from_infos(install_path=None, install=[], remove=[], conflicts=[], - paths=sys.path): + paths=None): """Install and remove the given distributions. The function signature is made to be compatible with the one of get_infos. @@ -188,7 +243,7 @@ if remove: temp_dir = tempfile.mkdtemp() for dist in remove: - files = dist.get_installed_files() + files = dist.list_installed_files() temp_files[dist] = _move_files(files, temp_dir) try: if install: @@ -236,23 +291,31 @@ Conflict contains all the conflicting distributions, if there is a conflict. """ + # this function does several things: + # 1. get a release specified by the requirements + # 2. gather its metadata, using setuptools compatibility if needed + # 3. compare this tree with what is currently installed on the system, + # return the requirements of what is missing + # 4. do that recursively and merge back the results + # 5. 
return a dict containing information about what is needed to install + # or remove + if not installed: - logger.info('reading installed distributions') - installed = get_distributions(use_egg_info=True) + logger.debug('Reading installed distributions') + installed = list(get_distributions(use_egg_info=True)) infos = {'install': [], 'remove': [], 'conflict': []} - # Is a compatible version of the project is already installed ? + # Is a compatible version of the project already installed ? predicate = get_version_predicate(requirements) found = False - installed = list(installed) - # check that the project isnt already installed + # check that the project isn't already installed for installed_project in installed: # is it a compatible project ? if predicate.name.lower() != installed_project.name.lower(): continue found = True - logger.info('found %s %s', installed_project.name, + logger.info('Found %r %s', installed_project.name, installed_project.version) # if we already have something installed, check it matches the @@ -262,50 +325,45 @@ break if not found: - logger.info('project not installed') + logger.debug('Project not installed') if not index: index = wrapper.ClientWrapper() + if not installed: + installed = get_distributions(use_egg_info=True) + # Get all the releases that match the requirements try: - releases = index.get_releases(requirements) + release = index.get_release(requirements) except (ReleaseNotFound, ProjectNotFound): - raise InstallationException('Release not found: "%s"' % requirements) - - # Pick up a release, and try to get the dependency tree - release = releases.get_last(requirements, prefer_final=prefer_final) + raise InstallationException('Release not found: %r' % requirements) if release is None: - logger.info('could not find a matching project') + logger.info('Could not find a matching project') return infos - # this works for Metadata 1.2 metadata = release.fetch_metadata() - # for earlier, we need to build setuptools deps if any + # 
we need to build setuptools deps if any if 'requires_dist' not in metadata: - deps = _get_setuptools_deps(release) - else: - deps = metadata['requires_dist'] + metadata['requires_dist'] = _get_setuptools_deps(release) - # XXX deps not used + # build the dependency graph with local and required dependencies + dists = list(installed) + dists.append(release) + depgraph = generate_graph(dists) - distributions = itertools.chain(installed, [release]) - depgraph = generate_graph(distributions) - - # Store all the already_installed packages in a list, in case of rollback. # Get what the missing deps are dists = depgraph.missing[release] if dists: - logger.info("missing dependencies found, retrieving metadata") + logger.info("Missing dependencies found, retrieving metadata") # we have missing deps for dist in dists: _update_infos(infos, get_infos(dist, index, installed)) # Fill in the infos existing = [d for d in installed if d.name == release.name] - if existing: infos['remove'].append(existing[0]) infos['conflict'].extend(depgraph.reverse_list[existing[0]]) @@ -322,26 +380,38 @@ infos[key].extend(new_infos[key]) -def _remove_dist(dist, paths=sys.path): - remove(dist.name, paths) +def remove(project_name, paths=None, auto_confirm=True): + """Removes a single project from the installation. 
- -def remove(project_name, paths=sys.path): - """Removes a single project from the installation""" + Returns True on success + """ dist = get_distribution(project_name, use_egg_info=True, paths=paths) if dist is None: - raise DistutilsError('Distribution "%s" not found' % project_name) - files = dist.get_installed_files(local=True) + raise PackagingError('Distribution %r not found' % project_name) + files = dist.list_installed_files(local=True) rmdirs = [] rmfiles = [] tmp = tempfile.mkdtemp(prefix=project_name + '-uninstall') + + def _move_file(source, target): + try: + os.rename(source, target) + except OSError: + return sys.exc_info()[1] + return None + + success = True + error = None try: for file_, md5, size in files: if os.path.isfile(file_): dirname, filename = os.path.split(file_) tmpfile = os.path.join(tmp, filename) try: - os.rename(file_, tmpfile) + error = _move_file(file_, tmpfile) + if error is not None: + success = False + break finally: if not os.path.isfile(file_): os.rename(tmpfile, file_) @@ -352,72 +422,119 @@ finally: shutil.rmtree(tmp) - logger.info('removing %r...', project_name) + if not success: + logger.info('%r cannot be removed.', project_name) + logger.info('Error: %s', error) + return False - file_count = 0 + logger.info('Removing %r: ', project_name) + for file_ in rmfiles: - os.remove(file_) - file_count += 1 + logger.info(' %s', file_) - dir_count = 0 - for dirname in rmdirs: - if not os.path.exists(dirname): - # could - continue + # Taken from the pip project + if auto_confirm: + response = 'y' + else: + response = ask('Proceed (y/n)? 
', ('y', 'n')) - files_count = 0 - for root, dir, files in os.walk(dirname): - files_count += len(files) + if response == 'y': + file_count = 0 + for file_ in rmfiles: + os.remove(file_) + file_count += 1 - if files_count > 0: - # XXX Warning - continue + dir_count = 0 + for dirname in rmdirs: + if not os.path.exists(dirname): + # could + continue - # empty dirs with only empty dirs - if bool(os.stat(dirname).st_mode & stat.S_IWUSR): - # XXX Add a callable in shutil.rmtree to count - # the number of deleted elements - shutil.rmtree(dirname) - dir_count += 1 + files_count = 0 + for root, dir, files in os.walk(dirname): + files_count += len(files) - # removing the top path - # XXX count it ? - if os.path.exists(dist.path): - shutil.rmtree(dist.path) + if files_count > 0: + # XXX Warning + continue - logger.info('success: removed %d files and %d dirs', - file_count, dir_count) + # empty dirs with only empty dirs + if os.stat(dirname).st_mode & stat.S_IWUSR: + # XXX Add a callable in shutil.rmtree to count + # the number of deleted elements + shutil.rmtree(dirname) + dir_count += 1 + + # removing the top path + # XXX count it ? + if os.path.exists(dist.path): + shutil.rmtree(dist.path) + + logger.info('Success: removed %d files and %d dirs', + file_count, dir_count) + + return True def install(project): - logger.info('getting information about %r', project) + """Installs a project. 
+ + Returns True on success, False on failure + """ + if is_python_build(): + # Python would try to install into the site-packages directory under + # $PREFIX, but when running from an uninstalled code checkout we don't + # want to create directories under the installation root + message = ('installing third-party projects from an uninstalled ' + 'Python is not supported') + logger.error(message) + return False + + logger.info('Checking the installation location...') + purelib_path = get_path('purelib') + + # trying to write a file there + try: + with tempfile.NamedTemporaryFile(suffix=project, + dir=purelib_path) as testfile: + testfile.write(b'test') + except OSError: + # FIXME this should check the errno, or be removed altogether (race + # condition: the directory permissions could be changed between here + # and the actual install) + logger.info('Unable to write in "%s". Do you have the permissions ?' + % purelib_path) + return False + + logger.info('Getting information about %r...', project) try: info = get_infos(project) except InstallationException: - logger.info('cound not find %r', project) - return + logger.info('Cound not find %r', project) + return False if info['install'] == []: - logger.info('nothing to install') - return + logger.info('Nothing to install') + return False install_path = get_config_var('base') try: install_from_infos(install_path, info['install'], info['remove'], info['conflict']) - except InstallationConflict, e: + except InstallationConflict: + e = sys.exc_info()[1] if logger.isEnabledFor(logging.INFO): - projects = ['%s %s' % (p.name, p.version) for p in e.args[0]] + projects = ('%r %s' % (p.name, p.version) for p in e.args[0]) logger.info('%r conflicts with %s', project, ','.join(projects)) + return True + def _main(**attrs): if 'script_args' not in attrs: - import sys attrs['requirements'] = sys.argv[1] get_infos(**attrs) - if __name__ == '__main__': _main() diff --git a/distutils2/manifest.py b/distutils2/manifest.py --- 
a/distutils2/manifest.py +++ b/distutils2/manifest.py @@ -1,22 +1,20 @@ -"""distutils2.manifest +"""Class representing the list of files in a distribution. -Provides a Manifest class that can be used to: +The Manifest class can be used to: - read or write a MANIFEST file - read a template file and find out the file list - -Basically, Manifest *is* the file list. - -XXX todo: document + add tests """ +# XXX todo: document + add tests import re import os +import sys import fnmatch -import logging +from distutils2 import logger from distutils2.util import write_file, convert_path -from distutils2.errors import (DistutilsTemplateError, - DistutilsInternalError) +from distutils2.errors import (PackagingTemplateError, + PackagingInternalError) __all__ = ['Manifest'] @@ -24,6 +22,7 @@ _COLLAPSE_PATTERN = re.compile('\\\w*\n', re.M) _COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S) + class Manifest(object): """A list of files built by on exploring the filesystem and filtered by applying various patterns to what we find there. @@ -48,11 +47,8 @@ def sort(self): # Not a strict lexical sort! - sortable_files = map(os.path.split, self.files) - sortable_files.sort() - self.files = [] - for sort_tuple in sortable_files: - self.files.append(os.path.join(*sort_tuple)) + self.files = [os.path.join(*path_tuple) for path_tuple in + sorted(os.path.split(path) for path in self.files)] def clear(self): """Clear all collected files.""" @@ -72,7 +68,7 @@ Updates the list accordingly. 
""" - if isinstance(path_or_file, str): + if isinstance(path_or_file, basestring): f = open(path_or_file) else: f = path_or_file @@ -94,8 +90,8 @@ continue try: self._process_template_line(line) - except DistutilsTemplateError, msg: - logging.warning("%s, %s", path_or_file, msg) + except PackagingTemplateError: + logger.warning("%s, %s", path_or_file, sys.exc_info()[1]) def write(self, path): """Write the file list in 'self.filelist' (presumably as filled in @@ -103,22 +99,19 @@ named by 'self.manifest'. """ if os.path.isfile(path): - fp = open(path) - try: + with open(path) as fp: first_line = fp.readline() - finally: - fp.close() - if first_line != '# file GENERATED by distutils, do NOT edit\n': - logging.info("not writing to manually maintained " - "manifest file %r", path) + if first_line != '# file GENERATED by distutils2, do NOT edit\n': + logger.info("not writing to manually maintained " + "manifest file %r", path) return self.sort() self.remove_duplicates() content = self.files[:] - content.insert(0, '# file GENERATED by distutils, do NOT edit') - logging.info("writing manifest file %r", path) + content.insert(0, '# file GENERATED by distutils2, do NOT edit') + logger.info("writing manifest file %r", path) write_file(path, content) def read(self, path): @@ -126,28 +119,26 @@ fill in 'self.filelist', the list of files to include in the source distribution. """ - logging.info("reading manifest file %r", path) - manifest = open(path) - try: + logger.info("reading manifest file %r", path) + with open(path) as manifest: for line in manifest.readlines(): self.append(line) - finally: - manifest.close() - def exclude_pattern(self, pattern, anchor=1, prefix=None, is_regex=0): + def exclude_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): """Remove strings (presumably filenames) from 'files' that match 'pattern'. Other parameters are the same as for 'include_pattern()', above. - The list 'self.files' is modified in place. 
Return 1 if files are + The list 'self.files' is modified in place. Return True if files are found. """ - files_found = 0 + files_found = False pattern_re = _translate_pattern(pattern, anchor, prefix, is_regex) - for i in range(len(self.files)-1, -1, -1): + for i in range(len(self.files) - 1, -1, -1): if pattern_re.search(self.files[i]): del self.files[i] - files_found = 1 + files_found = True return files_found @@ -167,28 +158,28 @@ if action in ('include', 'exclude', 'global-include', 'global-exclude'): if len(words) < 2: - raise DistutilsTemplateError( + raise PackagingTemplateError( "%r expects ..." % action) - patterns = map(convert_path, words[1:]) + patterns = [convert_path(word) for word in words[1:]] elif action in ('recursive-include', 'recursive-exclude'): if len(words) < 3: - raise DistutilsTemplateError( + raise PackagingTemplateError( "%r expects
<dir> <pattern1> <pattern2>
      ..." % action) dir = convert_path(words[1]) - patterns = map(convert_path, words[2:]) + patterns = [convert_path(word) for word in words[2:]] elif action in ('graft', 'prune'): if len(words) != 2: - raise DistutilsTemplateError( + raise PackagingTemplateError( "%r expects a single " % action) dir_pattern = convert_path(words[1]) else: - raise DistutilsTemplateError("unknown action %r" % action) + raise PackagingTemplateError("unknown action %r" % action) return action, patterns, dir, dir_pattern @@ -205,55 +196,56 @@ # can proceed with minimal error-checking. if action == 'include': for pattern in patterns: - if not self._include_pattern(pattern, anchor=1): - logging.warning("no files found matching %r", pattern) + if not self._include_pattern(pattern, anchor=True): + logger.warning("no files found matching %r", pattern) elif action == 'exclude': for pattern in patterns: - if not self.exclude_pattern(pattern, anchor=1): - logging.warning("no previously-included files " - "found matching %r", pattern) + if not self.exclude_pattern(pattern, anchor=True): + logger.warning("no previously-included files " + "found matching %r", pattern) elif action == 'global-include': for pattern in patterns: - if not self._include_pattern(pattern, anchor=0): - logging.warning("no files found matching %r " - "anywhere in distribution", pattern) + if not self._include_pattern(pattern, anchor=False): + logger.warning("no files found matching %r " + "anywhere in distribution", pattern) elif action == 'global-exclude': for pattern in patterns: - if not self.exclude_pattern(pattern, anchor=0): - logging.warning("no previously-included files " - "matching %r found anywhere in " - "distribution", pattern) + if not self.exclude_pattern(pattern, anchor=False): + logger.warning("no previously-included files " + "matching %r found anywhere in " + "distribution", pattern) elif action == 'recursive-include': for pattern in patterns: if not self._include_pattern(pattern, prefix=dir): - 
logging.warning("no files found matching %r " - "under directory %r", pattern, dir) + logger.warning("no files found matching %r " + "under directory %r", pattern, dir) elif action == 'recursive-exclude': for pattern in patterns: if not self.exclude_pattern(pattern, prefix=dir): - logging.warning("no previously-included files " - "matching %r found under directory %r", - pattern, dir) + logger.warning("no previously-included files " + "matching %r found under directory %r", + pattern, dir) elif action == 'graft': if not self._include_pattern(None, prefix=dir_pattern): - logging.warning("no directories found matching %r", - dir_pattern) + logger.warning("no directories found matching %r", + dir_pattern) elif action == 'prune': if not self.exclude_pattern(None, prefix=dir_pattern): - logging.warning("no previously-included directories found " - "matching %r", dir_pattern) + logger.warning("no previously-included directories found " + "matching %r", dir_pattern) else: - raise DistutilsInternalError( - "this cannot happen: invalid action %r" % action) + raise PackagingInternalError( + "this cannot happen: invalid action %r" % action) - def _include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0): + def _include_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): """Select strings (presumably filenames) from 'self.files' that match 'pattern', a Unix-style wildcard (glob) pattern. @@ -277,9 +269,9 @@ Selected strings will be added to self.files. - Return 1 if files are found. + Return True if files are found. 
""" - files_found = 0 + files_found = False pattern_re = _translate_pattern(pattern, anchor, prefix, is_regex) # delayed loading of allfiles list @@ -289,21 +281,19 @@ for name in self.allfiles: if pattern_re.search(name): self.files.append(name) - files_found = 1 + files_found = True return files_found - # # Utility functions # - def _findall(dir=os.curdir): """Find all files under 'dir' and return the list of full filenames (relative to 'dir'). """ - from stat import ST_MODE, S_ISREG, S_ISDIR, S_ISLNK + from stat import S_ISREG, S_ISDIR, S_ISLNK list = [] stack = [dir] @@ -322,7 +312,7 @@ # Avoid excess stat calls -- just one will do, thank you! stat = os.stat(fullname) - mode = stat[ST_MODE] + mode = stat.st_mode if S_ISREG(mode): list.append(fullname) elif S_ISDIR(mode) and not S_ISLNK(mode): @@ -331,7 +321,6 @@ return list - def _glob_to_re(pattern): """Translate a shell-like glob pattern to a regular expression. @@ -353,7 +342,7 @@ return pattern_re -def _translate_pattern(pattern, anchor=1, prefix=None, is_regex=0): +def _translate_pattern(pattern, anchor=True, prefix=None, is_regex=False): """Translate a shell-like wildcard pattern to a compiled regular expression. @@ -362,7 +351,7 @@ or just returned as-is (assumes it's a regex object). 
""" if is_regex: - if isinstance(pattern, str): + if isinstance(pattern, basestring): return re.compile(pattern) else: return pattern diff --git a/distutils2/markers.py b/distutils2/markers.py --- a/distutils2/markers.py +++ b/distutils2/markers.py @@ -1,10 +1,11 @@ -""" Micro-language for PEP 345 environment markers -""" +"""Parser for the environment markers micro-language defined in PEP 345.""" + import sys import platform import os -from tokenize import tokenize, NAME, OP, STRING, ENDMARKER -from StringIO import StringIO + +from tokenize import generate_tokens, NAME, OP, STRING, ENDMARKER +from StringIO import StringIO as BytesIO __all__ = ['interpret'] @@ -30,7 +31,8 @@ 'python_full_version': sys.version.split(' ', 1)[0], 'os.name': os.name, 'platform.version': platform.version(), - 'platform.machine': platform.machine()} + 'platform.machine': platform.machine(), + 'platform.python_implementation': platform.python_implementation()} class _Operation(object): @@ -124,39 +126,39 @@ return self.left() and self.right() -class _CHAIN(object): - - def __init__(self, execution_context=None): - self.ops = [] - self.op_starting = True - self.execution_context = execution_context - - def eat(self, toktype, tokval, rowcol, line, logical_line): +def interpret(marker, execution_context=None): + """Interpret a marker and return a result depending on environment.""" + marker = marker.strip().encode() + ops = [] + op_starting = True + for token in generate_tokens(BytesIO(marker).readline): + # Unpack token + toktype, tokval, rowcol, line, logical_line = token if toktype not in (NAME, OP, STRING, ENDMARKER): raise SyntaxError('Type not supported "%s"' % tokval) - if self.op_starting: - op = _Operation(self.execution_context) - if len(self.ops) > 0: - last = self.ops[-1] + if op_starting: + op = _Operation(execution_context) + if len(ops) > 0: + last = ops[-1] if isinstance(last, (_OR, _AND)) and not last.filled(): last.right = op else: - self.ops.append(op) + ops.append(op) 
else: - self.ops.append(op) - self.op_starting = False + ops.append(op) + op_starting = False else: - op = self.ops[-1] + op = ops[-1] if (toktype == ENDMARKER or (toktype == NAME and tokval in ('and', 'or'))): if toktype == NAME and tokval == 'and': - self.ops.append(_AND(self.ops.pop())) + ops.append(_AND(ops.pop())) elif toktype == NAME and tokval == 'or': - self.ops.append(_OR(self.ops.pop())) - self.op_starting = True - return + ops.append(_OR(ops.pop())) + op_starting = True + continue if isinstance(op, (_OR, _AND)) and op.right is not None: op = op.right @@ -179,16 +181,7 @@ else: op.op = tokval - def result(self): - for op in self.ops: - if not op(): - return False - return True - - -def interpret(marker, execution_context=None): - """Interpret a marker and return a result depending on environment.""" - marker = marker.strip() - operations = _CHAIN(execution_context) - tokenize(StringIO(marker).readline, operations.eat) - return operations.result() + for op in ops: + if not op(): + return False + return True diff --git a/distutils2/metadata.py b/distutils2/metadata.py --- a/distutils2/metadata.py +++ b/distutils2/metadata.py @@ -3,17 +3,19 @@ Supports all metadata formats (1.0, 1.1, 1.2). 
""" +import codecs import re +import logging + from StringIO import StringIO from email import message_from_file - from distutils2 import logger from distutils2.markers import interpret from distutils2.version import (is_valid_predicate, is_valid_version, - is_valid_versions) + is_valid_versions) from distutils2.errors import (MetadataMissingError, - MetadataConflictError, - MetadataUnrecognizedVersionError) + MetadataConflictError, + MetadataUnrecognizedVersionError) try: # docutils is installed @@ -39,8 +41,7 @@ _HAS_DOCUTILS = False # public API of this module -__all__ = ['Metadata', 'get_metadata_version', 'metadata_to_dict', - 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] +__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] # Encoding used for the PKG-INFO files PKG_INFO_ENCODING = 'utf-8' @@ -99,7 +100,7 @@ return True return False - keys = fields.keys() + keys = list(fields) possible_versions = ['1.0', '1.1', '1.2'] # first let's try to see if a field is not part of one of the version @@ -138,52 +139,6 @@ return '1.2' -def get_metadata_version(metadata): - """Return the Metadata-Version attribute - - - *metadata* give a METADATA object - """ - return metadata['Metadata-Version'] - - -def metadata_to_dict(metadata): - """Convert a metadata object to a dict - - - *metadata* give a METADATA object - """ - data = { - 'metadata_version': metadata['Metadata-Version'], - 'name': metadata['Name'], - 'version': metadata['Version'], - 'summary': metadata['Summary'], - 'home_page': metadata['Home-page'], - 'author': metadata['Author'], - 'author_email': metadata['Author-email'], - 'license': metadata['License'], - 'description': metadata['Description'], - 'keywords': metadata['Keywords'], - 'platform': metadata['Platform'], - 'classifier': metadata['Classifier'], - 'download_url': metadata['Download-URL'], - } - - if metadata['Metadata-Version'] == '1.2': - data['requires_dist'] = metadata['Requires-Dist'] - data['requires_python'] = 
metadata['Requires-Python'] - data['requires_external'] = metadata['Requires-External'] - data['provides_dist'] = metadata['Provides-Dist'] - data['obsoletes_dist'] = metadata['Obsoletes-Dist'] - data['project_url'] = [','.join(url) for url in - metadata['Project-URL']] - - elif metadata['Metadata-Version'] == '1.1': - data['provides'] = metadata['Provides'] - data['requires'] = metadata['Requires'] - data['obsoletes'] = metadata['Obsoletes'] - - return data - - _ATTR2FIELD = { 'metadata_version': 'Metadata-Version', 'name': 'Name', @@ -225,6 +180,8 @@ _UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description') +_MISSING = object() + class NoDefault(object): """Marker object used for clean representation""" @@ -248,10 +205,8 @@ # also document the mapping API and UNKNOWN default key def __init__(self, path=None, platform_dependent=False, - execution_context=None, fileobj=None, mapping=None, - display_warnings=False): + execution_context=None, fileobj=None, mapping=None): self._fields = {} - self.display_warnings = display_warnings self.requires_files = [] self.docutils_support = _HAS_DOCUTILS self.platform_dependent = platform_dependent @@ -269,12 +224,7 @@ self._fields['Metadata-Version'] = _best_version(self._fields) def _write_field(self, file, name, value): - file.write('%s: %s\n' % (name, value)) - - def _encode_field(self, value): - if isinstance(value, unicode): - return value.encode(PKG_INFO_ENCODING) - return str(value) + file.write(u'%s: %s\n' % (name, value)) def __getitem__(self, name): return self.get(name) @@ -358,7 +308,8 @@ def read(self, filepath): """Read the metadata values from a file path.""" - self.read_file(open(filepath)) + with codecs.open(filepath, 'r', encoding='utf-8') as fp: + self.read_file(fp) def read_file(self, fileob): """Read the metadata values from a file object.""" @@ -380,11 +331,8 @@ def write(self, filepath): """Write the metadata fields to filepath.""" - pkg_info = open(filepath, 'w') - try: - 
self.write_file(pkg_info) - finally: - pkg_info.close() + with codecs.open(filepath, 'w', encoding='utf-8') as fp: + self.write_file(fp) def write_file(self, fileobject): """Write the PKG-INFO format data to a file object.""" @@ -437,36 +385,38 @@ if ((name in _ELEMENTSFIELD or name == 'Platform') and not isinstance(value, (list, tuple))): - if isinstance(value, str): + if isinstance(value, basestring): value = [v.strip() for v in value.split(',')] else: value = [] elif (name in _LISTFIELDS and not isinstance(value, (list, tuple))): - if isinstance(value, str): + if isinstance(value, basestring): value = [value] else: value = [] - if self.display_warnings: + if logger.isEnabledFor(logging.WARNING): + project_name = self['Name'] + if name in _PREDICATE_FIELDS and value is not None: for v in value: # check that the values are valid predicates if not is_valid_predicate(v.split(';')[0]): - logger.warn('"%s" is not a valid predicate (field "%s")' % - (v, name)) + logger.warning( + '%r: %r is not a valid predicate (field %r)', + project_name, v, name) # FIXME this rejects UNKNOWN, is that right? 
elif name in _VERSIONS_FIELDS and value is not None: if not is_valid_versions(value): - logger.warn('"%s" is not a valid version (field "%s")' % - (value, name)) + logger.warning('%r: %r is not a valid version (field %r)', + project_name, value, name) elif name in _VERSION_FIELDS and value is not None: if not is_valid_version(value): - logger.warn('"%s" is not a valid version (field "%s")' % - (value, name)) + logger.warning('%r: %r is not a valid version (field %r)', + project_name, value, name) if name in _UNICODEFIELDS: - value = self._encode_field(value) if name == 'Description': value = self._remove_line_prefix(value) @@ -482,7 +432,7 @@ return default if name in _UNICODEFIELDS: value = self._fields[name] - return self._encode_field(value) + return value elif name in _LISTFIELDS: value = self._fields[name] if value is None: @@ -493,17 +443,17 @@ if not valid: continue if name not in _LISTTUPLEFIELDS: - res.append(self._encode_field(val)) + res.append(val) else: # That's for Project-URL - res.append((self._encode_field(val[0]), val[1])) + res.append((val[0], val[1])) return res elif name in _ELEMENTSFIELD: valid, value = self._platform(self._fields[name]) if not valid: return [] - if isinstance(value, str): + if isinstance(value, basestring): return value.split(',') valid, value = self._platform(self._fields[name]) if not valid: @@ -551,13 +501,55 @@ return missing, warnings + def todict(self): + """Return fields as a dict. + + Field names will be converted to use the underscore-lowercase style + instead of hyphen-mixed case (i.e. home_page instead of Home-page). 
+ """ + data = { + 'metadata_version': self['Metadata-Version'], + 'name': self['Name'], + 'version': self['Version'], + 'summary': self['Summary'], + 'home_page': self['Home-page'], + 'author': self['Author'], + 'author_email': self['Author-email'], + 'license': self['License'], + 'description': self['Description'], + 'keywords': self['Keywords'], + 'platform': self['Platform'], + 'classifier': self['Classifier'], + 'download_url': self['Download-URL'], + } + + if self['Metadata-Version'] == '1.2': + data['requires_dist'] = self['Requires-Dist'] + data['requires_python'] = self['Requires-Python'] + data['requires_external'] = self['Requires-External'] + data['provides_dist'] = self['Provides-Dist'] + data['obsoletes_dist'] = self['Obsoletes-Dist'] + data['project_url'] = [','.join(url) for url in + self['Project-URL']] + + elif self['Metadata-Version'] == '1.1': + data['provides'] = self['Provides'] + data['requires'] = self['Requires'] + data['obsoletes'] = self['Obsoletes'] + + return data + # Mapping API def keys(self): return _version2fieldlist(self['Metadata-Version']) + def __iter__(self): + for key in self.keys(): + yield key + def values(self): - return [self[key] for key in self.keys()] + return [self[key] for key in list(self.keys())] def items(self): - return [(key, self[key]) for key in self.keys()] + return [(key, self[key]) for key in list(self.keys())] diff --git a/distutils2/mkcfg.py b/distutils2/mkcfg.py deleted file mode 100644 --- a/distutils2/mkcfg.py +++ /dev/null @@ -1,657 +0,0 @@ -#!/usr/bin/env python -# -# Helper for automating the creation of a package by looking at you -# current directory and asking the user questions. -# -# Available as either a stand-alone file or callable from the distutils2 -# package: -# -# python -m distutils2.mkcfg -# or: -# python mkcfg.py -# -# Written by Sean Reifschneider -# -# Original TODO list: -# Look for a license file and automatically add the category. 
-# When a .c file is found during the walk, can we add it as an extension? -# Ask if there is a maintainer different that the author -# Ask for the platform (can we detect this via "import win32" or something?) -# Ask for the dependencies. -# Ask for the Requires-Dist -# Ask for the Provides-Dist -# Ask for a description -# Detect scripts (not sure how. #! outside of package?) - -import os -import sys -import glob -import re -import shutil -from ConfigParser import RawConfigParser -from textwrap import dedent -try: - from hashlib import md5 -except ImportError: - from distutils2._backport.hashlib import md5 -# importing this with an underscore as it should be replaced by the -# dict form or another structures for all purposes -from distutils2._trove import all_classifiers as _CLASSIFIERS_LIST -from distutils2._backport import sysconfig - -_FILENAME = 'setup.cfg' - -_helptext = { - 'name': ''' -The name of the program to be packaged, usually a single word composed -of lower-case characters such as "python", "sqlalchemy", or "CherryPy". -''', - 'version': ''' -Version number of the software, typically 2 or 3 numbers separated by dots -such as "1.00", "0.6", or "3.02.01". "0.1.0" is recommended for initial -development. -''', - 'summary': ''' -A one-line summary of what this project is or does, typically a sentence 80 -characters or less in length. -''', - 'author': ''' -The full name of the author (typically you). -''', - 'author_email': ''' -E-mail address of the project author (typically you). -''', - 'do_classifier': ''' -Trove classifiers are optional identifiers that allow you to specify the -intended audience by saying things like "Beta software with a text UI -for Linux under the PSF license. However, this can be a somewhat involved -process. -''', - 'packages': ''' -You can provide a package name contained in your project. -''', - 'modules': ''' -You can provide a python module contained in your project. 
-''', - 'extra_files': ''' -You can provide extra files/dirs contained in your project. -It has to follow the template syntax. XXX add help here. -''', - - 'home_page': ''' -The home page for the project, typically starting with "http://". -''', - 'trove_license': ''' -Optionally you can specify a license. Type a string that identifies a common -license, and then you can select a list of license specifiers. -''', - 'trove_generic': ''' -Optionally, you can set other trove identifiers for things such as the -human language, programming language, user interface, etc... -''', - 'setup.py found': ''' -The setup.py script will be executed to retrieve the metadata. -A wizard will be run if you answer "n", -''', -} - -# XXX everything needs docstrings and tests (both low-level tests of various -# methods and functional tests of running the script) - - -def ask_yn(question, default=None, helptext=None): - question += ' (y/n)' - while True: - answer = ask(question, default, helptext, required=True) - if answer and answer[0].lower() in 'yn': - return answer[0].lower() - - print '\nERROR: You must select "Y" or "N".\n' - - -def ask(question, default=None, helptext=None, required=True, - lengthy=False, multiline=False): - prompt = '%s: ' % (question,) - if default: - prompt = '%s [%s]: ' % (question, default) - if default and len(question) + len(default) > 70: - prompt = '%s\n [%s]: ' % (question, default) - if lengthy or multiline: - prompt += '\n > ' - - if not helptext: - helptext = 'No additional help available.' - - helptext = helptext.strip("\n") - - while True: - sys.stdout.write(prompt) - sys.stdout.flush() - - line = sys.stdin.readline().strip() - if line == '?': - print '=' * 70 - print helptext - print '=' * 70 - continue - if default and not line: - return default - if not line and required: - print '*' * 70 - print 'This value cannot be empty.' 
- print '===========================' - if helptext: - print helptext - print '*' * 70 - continue - return line - - -def _build_classifiers_dict(classifiers): - d = {} - for key in classifiers: - subDict = d - for subkey in key.split(' :: '): - if not subkey in subDict: - subDict[subkey] = {} - subDict = subDict[subkey] - return d - -CLASSIFIERS = _build_classifiers_dict(_CLASSIFIERS_LIST) - - -def _build_licences(classifiers): - res = [] - for index, item in enumerate(classifiers): - if not item.startswith('License :: '): - continue - res.append((index, item.split(' :: ')[-1].lower())) - return res - -LICENCES = _build_licences(_CLASSIFIERS_LIST) - - -class MainProgram(object): - def __init__(self): - self.configparser = None - self.classifiers = set([]) - self.data = {} - self.data['classifier'] = self.classifiers - self.data['packages'] = [] - self.data['modules'] = [] - self.data['platform'] = [] - self.data['resources'] = [] - self.data['extra_files'] = [] - self.data['scripts'] = [] - self.load_config_file() - - def lookup_option(self, key): - if not self.configparser.has_option('DEFAULT', key): - return None - return self.configparser.get('DEFAULT', key) - - def load_config_file(self): - self.configparser = RawConfigParser() - # TODO replace with section in distutils config file - #XXX freedesktop - self.configparser.read(os.path.expanduser('~/.mkcfg')) - self.data['author'] = self.lookup_option('author') - self.data['author_email'] = self.lookup_option('author_email') - - def update_config_file(self): - valuesDifferent = False - # FIXME looking only for those two fields seems wrong - for compareKey in ('author', 'author_email'): - if self.lookup_option(compareKey) != self.data[compareKey]: - valuesDifferent = True - self.configparser.set('DEFAULT', compareKey, - self.data[compareKey]) - - if not valuesDifferent: - return - - #XXX freedesktop - fp = open(os.path.expanduser('~/.mkcfgpy'), 'w') - try: - self.configparser.write(fp) - finally: - fp.close() - - 
def load_existing_setup_script(self): - """ Generate a setup.cfg from an existing setup.py. - - It only exports the distutils metadata (setuptools specific metadata - is not actually supported). - """ - setuppath = 'setup.py' - if not os.path.exists(setuppath): - return - else: - ans = ask_yn(('A legacy setup.py has been found.\n' - 'Would you like to convert it to a setup.cfg ?'), - 'y', - _helptext['setup.py found']) - if ans != 'y': - return - - data = self.data - - def setup(**attrs): - """Mock the setup(**attrs) in order to retrive metadata.""" - # use the distutils v1 processings to correctly parse metadata. - #XXX we could also use the setuptools distibution ??? - from distutils.dist import Distribution - dist = Distribution(attrs) - dist.parse_config_files() - # 1. retrieves metadata that are quite similar PEP314<->PEP345 - labels = (('name',) * 2, - ('version',) * 2, - ('author',) * 2, - ('author_email',) * 2, - ('maintainer',) * 2, - ('maintainer_email',) * 2, - ('description', 'summary'), - ('long_description', 'description'), - ('url', 'home_page'), - ('platforms', 'platform')) - - if sys.version[:3] >= '2.5': - labels += (('provides', 'provides-dist'), - ('obsoletes', 'obsoletes-dist'), - ('requires', 'requires-dist'),) - get = lambda lab: getattr(dist.metadata, lab.replace('-', '_')) - data.update((new, get(old)) for (old, new) in labels if get(old)) - # 2. retrieves data that requires special processings. - data['classifier'].update(dist.get_classifiers() or []) - data['scripts'].extend(dist.scripts or []) - data['packages'].extend(dist.packages or []) - data['modules'].extend(dist.py_modules or []) - # 2.1 data_files -> resources. 
- if dist.data_files: - if len(dist.data_files) < 2 or \ - isinstance(dist.data_files[1], str): - dist.data_files = [('', dist.data_files)] - # add tokens in the destination paths - vars = {'distribution.name': data['name']} - path_tokens = sysconfig.get_paths(vars=vars).items() - # sort tokens to use the longest one first - # TODO chain two sorted with key arguments, remove cmp - path_tokens.sort(cmp=lambda x, y: cmp(len(y), len(x)), - key=lambda x: x[1]) - for dest, srcs in (dist.data_files or []): - dest = os.path.join(sys.prefix, dest) - for tok, path in path_tokens: - if dest.startswith(path): - dest = ('{%s}' % tok) + dest[len(path):] - files = [('/ '.join(src.rsplit('/', 1)), dest) - for src in srcs] - data['resources'].extend(files) - continue - # 2.2 package_data -> extra_files - package_dirs = dist.package_dir or {} - for package, extras in dist.package_data.iteritems() or []: - package_dir = package_dirs.get(package, package) - files = [os.path.join(package_dir, f) for f in extras] - data['extra_files'].extend(files) - - # Use README file if its content is the desciption - if "description" in data: - ref = md5(re.sub('\s', '', self.data['description']).lower()) - ref = ref.digest() - for readme in glob.glob('README*'): - fp = open(readme) - try: - contents = fp.read() - finally: - fp.close() - val = md5(re.sub('\s', '', contents.lower())).digest() - if val == ref: - del data['description'] - data['description-file'] = readme - break - - # apply monkey patch to distutils (v1) and setuptools (if needed) - # (abord the feature if distutils v1 has been killed) - try: - import distutils.core as DC - DC.setup # ensure distutils v1 - except (ImportError, AttributeError): - return - saved_setups = [(DC, DC.setup)] - DC.setup = setup - try: - import setuptools - saved_setups.append((setuptools, setuptools.setup)) - setuptools.setup = setup - except (ImportError, AttributeError): - pass - # get metadata by executing the setup.py with the patched setup(...) 
- success = False # for python < 2.4 - try: - pyenv = globals().copy() - execfile(setuppath, pyenv) - success = True - finally: # revert monkey patches - for patched_module, original_setup in saved_setups: - patched_module.setup = original_setup - if not self.data: - raise ValueError('Unable to load metadata from setup.py') - return success - - def inspect_file(self, path): - fp = open(path, 'r') - try: - for _ in xrange(10): - line = fp.readline() - m = re.match(r'^#!.*python((?P\d)(\.\d+)?)?$', line) - if m: - if m.group('major') == '3': - self.classifiers.add( - 'Programming Language :: Python :: 3') - else: - self.classifiers.add( - 'Programming Language :: Python :: 2') - finally: - fp.close() - - def inspect_directory(self): - dirName = os.path.basename(os.getcwd()) - self.data['name'] = dirName - m = re.match(r'(.*)-(\d.+)', dirName) - if m: - self.data['name'] = m.group(1) - self.data['version'] = m.group(2) - - def query_user(self): - self.data['name'] = ask('Project name', self.data['name'], - _helptext['name']) - self.data['version'] = ask('Current version number', - self.data.get('version'), _helptext['version']) - self.data['summary'] = ask('Package summary', - self.data.get('summary'), _helptext['summary'], - lengthy=True) - self.data['author'] = ask('Author name', - self.data.get('author'), _helptext['author']) - self.data['author_email'] = ask('Author e-mail address', - self.data.get('author_email'), _helptext['author_email']) - self.data['home_page'] = ask('Project Home Page', - self.data.get('home_page'), _helptext['home_page'], - required=False) - - if ask_yn('Do you want me to automatically build the file list ' - 'with everything I can find in the current directory ? ' - 'If you say no, you will have to define them manually.') == 'y': - self._find_files() - else: - while ask_yn('Do you want to add a single module ?' 
- ' (you will be able to add full packages next)', - helptext=_helptext['modules']) == 'y': - self._set_multi('Module name', 'modules') - - while ask_yn('Do you want to add a package ?', - helptext=_helptext['packages']) == 'y': - self._set_multi('Package name', 'packages') - - while ask_yn('Do you want to add an extra file ?', - helptext=_helptext['extra_files']) == 'y': - self._set_multi('Extra file/dir name', 'extra_files') - - if ask_yn('Do you want to set Trove classifiers?', - helptext=_helptext['do_classifier']) == 'y': - self.set_classifier() - - def _find_files(self): - # we are looking for python modules and packages, - # other stuff are added as regular files - pkgs = self.data['packages'] - modules = self.data['modules'] - extra_files = self.data['extra_files'] - - def is_package(path): - return os.path.exists(os.path.join(path, '__init__.py')) - - curdir = os.getcwd() - scanned = [] - _pref = ['lib', 'include', 'dist', 'build', '.', '~'] - _suf = ['.pyc'] - - def to_skip(path): - path = relative(path) - - for pref in _pref: - if path.startswith(pref): - return True - - for suf in _suf: - if path.endswith(suf): - return True - - return False - - def relative(path): - return path[len(curdir) + 1:] - - def dotted(path): - res = relative(path).replace(os.path.sep, '.') - if res.endswith('.py'): - res = res[:-len('.py')] - return res - - # first pass : packages - for root, dirs, files in os.walk(curdir): - if to_skip(root): - continue - for dir_ in sorted(dirs): - if to_skip(dir_): - continue - fullpath = os.path.join(root, dir_) - dotted_name = dotted(fullpath) - if is_package(fullpath) and dotted_name not in pkgs: - pkgs.append(dotted_name) - scanned.append(fullpath) - - # modules and extra files - for root, dirs, files in os.walk(curdir): - if to_skip(root): - continue - - if True in [root.startswith(path) for path in scanned]: - continue - - for file in sorted(files): - fullpath = os.path.join(root, file) - if to_skip(fullpath): - continue - # single 
module ? - if os.path.splitext(file)[-1] == '.py': - modules.append(dotted(fullpath)) - else: - extra_files.append(relative(fullpath)) - - def _set_multi(self, question, name): - existing_values = self.data[name] - value = ask(question, helptext=_helptext[name]).strip() - if value not in existing_values: - existing_values.append(value) - - def set_classifier(self): - self.set_devel_status(self.classifiers) - self.set_license(self.classifiers) - self.set_other_classifier(self.classifiers) - - def set_other_classifier(self, classifiers): - if ask_yn('Do you want to set other trove identifiers', 'n', - _helptext['trove_generic']) != 'y': - return - self.walk_classifiers(classifiers, [CLASSIFIERS], '') - - def walk_classifiers(self, classifiers, trovepath, desc): - trove = trovepath[-1] - - if not trove: - return - - for key in sorted(trove): - if len(trove[key]) == 0: - if ask_yn('Add "%s"' % desc[4:] + ' :: ' + key, 'n') == 'y': - classifiers.add(desc[4:] + ' :: ' + key) - continue - - if ask_yn('Do you want to set items under\n "%s" (%d sub-items)' - % (key, len(trove[key])), 'n', - _helptext['trove_generic']) == 'y': - self.walk_classifiers(classifiers, trovepath + [trove[key]], - desc + ' :: ' + key) - - def set_license(self, classifiers): - while True: - license = ask('What license do you use', - helptext=_helptext['trove_license'], required=False) - if not license: - return - - license_words = license.lower().split(' ') - found_list = [] - - for index, licence in LICENCES: - for word in license_words: - if word in licence: - found_list.append(index) - break - - if len(found_list) == 0: - print('ERROR: Could not find a matching license for "%s"' % \ - license) - continue - - question = 'Matching licenses:\n\n' - - for index, list_index in enumerate(found_list): - question += ' %s) %s\n' % (index + 1, - _CLASSIFIERS_LIST[list_index]) - - question += ('\nType the number of the license you wish to use or ' - '? 
to try again:') - choice = ask(question, required=False) - - if choice == '?': - continue - if choice == '': - return - - try: - index = found_list[int(choice) - 1] - except ValueError: - print ("ERROR: Invalid selection, type a number from the list " - "above.") - - classifiers.add(_CLASSIFIERS_LIST[index]) - return - - def set_devel_status(self, classifiers): - while True: - choice = ask(dedent('''\ - Please select the project status: - - 1 - Planning - 2 - Pre-Alpha - 3 - Alpha - 4 - Beta - 5 - Production/Stable - 6 - Mature - 7 - Inactive - - Status'''), required=False) - if choice: - try: - choice = int(choice) - 1 - key = ['Development Status :: 1 - Planning', - 'Development Status :: 2 - Pre-Alpha', - 'Development Status :: 3 - Alpha', - 'Development Status :: 4 - Beta', - 'Development Status :: 5 - Production/Stable', - 'Development Status :: 6 - Mature', - 'Development Status :: 7 - Inactive'][choice] - classifiers.add(key) - return - except (IndexError, ValueError): - print ("ERROR: Invalid selection, type a single digit " - "number.") - - def _dotted_packages(self, data): - packages = sorted(data) - modified_pkgs = [] - for pkg in packages: - pkg = pkg.lstrip('./') - pkg = pkg.replace('/', '.') - modified_pkgs.append(pkg) - return modified_pkgs - - def write_setup_script(self): - if os.path.exists(_FILENAME): - if os.path.exists('%s.old' % _FILENAME): - print("ERROR: %(name)s.old backup exists, please check that " - "current %(name)s is correct and remove %(name)s.old" % \ - {'name': _FILENAME}) - return - shutil.move(_FILENAME, '%s.old' % _FILENAME) - - fp = open(_FILENAME, 'w') - try: - fp.write('[metadata]\n') - # simple string entries - for name in ('name', 'version', 'summary', 'download_url'): - fp.write('%s = %s\n' % (name, self.data.get(name, 'UNKNOWN'))) - # optional string entries - if 'keywords' in self.data and self.data['keywords']: - fp.write('keywords = %s\n' % ' '.join(self.data['keywords'])) - for name in ('home_page', 'author', 
'author_email', - 'maintainer', 'maintainer_email', 'description-file'): - if name in self.data and self.data[name]: - fp.write('%s = %s\n' % (name, self.data[name])) - if 'description' in self.data: - fp.write( - 'description = %s\n' - % '\n |'.join(self.data['description'].split('\n'))) - # multiple use string entries - for name in ('platform', 'supported-platform', 'classifier', - 'requires-dist', 'provides-dist', 'obsoletes-dist', - 'requires-external'): - if not(name in self.data and self.data[name]): - continue - fp.write('%s = ' % name) - fp.write(''.join(' %s\n' % val - for val in self.data[name]).lstrip()) - fp.write('\n[files]\n') - for name in ('packages', 'modules', 'scripts', - 'package_data', 'extra_files'): - if not(name in self.data and self.data[name]): - continue - fp.write('%s = %s\n' - % (name, '\n '.join(self.data[name]).strip())) - fp.write('\nresources =\n') - for src, dest in self.data['resources']: - fp.write(' %s = %s\n' % (src, dest)) - fp.write('\n') - - finally: - fp.close() - - os.chmod(_FILENAME, 0644) - print 'Wrote "%s".' % _FILENAME - - -def main(): - """Main entry point.""" - program = MainProgram() - # uncomment when implemented - if not program.load_existing_setup_script(): - program.inspect_directory() - program.query_user() - program.update_config_file() - program.write_setup_script() - # distutils2.util.cfg_to_args() - - -if __name__ == '__main__': - main() diff --git a/distutils2/index/__init__.py b/distutils2/pypi/__init__.py rename from distutils2/index/__init__.py rename to distutils2/pypi/__init__.py --- a/distutils2/index/__init__.py +++ b/distutils2/pypi/__init__.py @@ -1,6 +1,4 @@ -"""Package containing ways to interact with Index APIs. 
- -""" +"""Low-level and high-level APIs to interact with project indexes.""" __all__ = ['simple', 'xmlrpc', @@ -8,4 +6,4 @@ 'errors', 'mirrors'] -from dist import ReleaseInfo, ReleasesList, DistInfo +from distutils2.pypi.dist import ReleaseInfo, ReleasesList, DistInfo diff --git a/distutils2/index/base.py b/distutils2/pypi/base.py rename from distutils2/index/base.py rename to distutils2/pypi/base.py --- a/distutils2/index/base.py +++ b/distutils2/pypi/base.py @@ -1,4 +1,6 @@ -from distutils2.index.dist import ReleasesList +"""Base class for index crawlers.""" + +from distutils2.pypi.dist import ReleasesList class BaseClient(object): diff --git a/distutils2/index/dist.py b/distutils2/pypi/dist.py rename from distutils2/index/dist.py rename to distutils2/pypi/dist.py --- a/distutils2/index/dist.py +++ b/distutils2/pypi/dist.py @@ -1,35 +1,25 @@ -"""distutils2.index.dist +"""Classes representing releases and distributions retrieved from indexes. -Provides useful classes to represent the release and distributions retrieved -from indexes. +A project (= unique name) can have several releases (= versions) and +each release can have several distributions (= sdist and bdists). -A project can have several releases (=versions) and each release can have -several distributions (sdist, bdist). +Release objects contain metadata-related information (see PEP 376); +distribution objects contain download-related information. +""" -The release contains the metadata related informations (see PEP 384), and the -distributions contains download related informations. 
- -""" -import mimetypes import re -import tarfile +import hashlib import tempfile import urllib import urlparse -import zipfile -try: - import hashlib -except ImportError: - from distutils2._backport import hashlib +from distutils2.errors import IrrationalVersionError +from distutils2.version import (suggest_normalized_version, NormalizedVersion, + get_version_predicate) +from distutils2.metadata import Metadata +from distutils2.pypi.errors import (HashDoesNotMatch, UnsupportedHashName, + CantParseArchiveName) +from distutils2.util import unpack_archive -from distutils2._backport.shutil import unpack_archive -from distutils2.errors import IrrationalVersionError -from distutils2.index.errors import (HashDoesNotMatch, UnsupportedHashName, - CantParseArchiveName) -from distutils2.version import (suggest_normalized_version, NormalizedVersion, - get_version_predicate) -from distutils2.metadata import Metadata -from distutils2.util import splitext __all__ = ['ReleaseInfo', 'DistInfo', 'ReleasesList', 'get_infos_from_url'] @@ -94,7 +84,7 @@ def fetch_metadata(self): """If the metadata is not set, use the indexes to get it""" if not self.metadata: - self._index.get_metadata(self.name, '%s' % self.version) + self._index.get_metadata(self.name, str(self.version)) return self.metadata @property @@ -104,7 +94,7 @@ def fetch_distributions(self): if self.dists is None: - self._index.get_distributions(self.name, '%s' % self.version) + self._index.get_distributions(self.name, str(self.version)) if self.dists is None: self.dists = {} return self.dists @@ -140,14 +130,14 @@ not return one existing distribution. 
""" if len(self.dists) == 0: - raise LookupError() + raise LookupError if dist_type: return self[dist_type] if prefer_source: if "sdist" in self.dists: dist = self["sdist"] else: - dist = self.dists.values()[0] + dist = next(self.dists.values()) return dist def unpack(self, path=None, prefer_source=True): @@ -254,14 +244,14 @@ self._url = None self.add_url(url, hashname, hashval, is_external) - def add_url(self, url, hashname=None, hashval=None, is_external=True): + def add_url(self, url=None, hashname=None, hashval=None, is_external=True): """Add a new url to the list of urls""" if hashname is not None: try: hashlib.new(hashname) except ValueError: raise UnsupportedHashName(hashname) - if not url in [u['url'] for u in self.urls]: + if url not in [u['url'] for u in self.urls]: self.urls.append({ 'url': url, 'hashname': hashname, @@ -323,20 +313,21 @@ path = tempfile.mkdtemp() filename = self.download(path) - content_type = mimetypes.guess_type(filename)[0] - self._unpacked_dir = unpack_archive(filename, path) + unpack_archive(filename, path) + self._unpacked_dir = path - return self._unpacked_dir + return path def _check_md5(self, filename): """Check that the md5 checksum of the given file matches the one in url param""" hashname = self.url['hashname'] expected_hashval = self.url['hashval'] - if not None in (expected_hashval, hashname): - f = open(filename) - hashval = hashlib.new(hashname) - hashval.update(f.read()) + if None not in (expected_hashval, hashname): + with open(filename, 'rb') as f: + hashval = hashlib.new(hashname) + hashval.update(f.read()) + if hashval.hexdigest() != expected_hashval: raise HashDoesNotMatch("got %s instead of %s" % (hashval.hexdigest(), expected_hashval)) @@ -408,19 +399,19 @@ """ if release: if release.name.lower() != self.name.lower(): - raise ValueError("%s is not the same project than %s" % + raise ValueError("%s is not the same project as %s" % (release.name, self.name)) - version = '%s' % release.version + version = 
str(release.version) - if not version in self.get_versions(): + if version not in self.get_versions(): # append only if not already exists self.releases.append(release) - for dist in release.dists.itervalues(): + for dist in release.dists.values(): for url in dist.urls: self.add_release(version, dist.dist_type, **url) else: - matches = [r for r in self.releases if '%s' % r.version == version - and r.name == self.name] + matches = [r for r in self.releases + if str(r.version) == version and r.name == self.name] if not matches: release = ReleaseInfo(self.name, version, index=self._index) self.releases.append(release) @@ -448,19 +439,19 @@ sort_by.append("version") self.releases.sort( - key=lambda i: [getattr(i, arg) for arg in sort_by], + key=lambda i: tuple(getattr(i, arg) for arg in sort_by), reverse=reverse, *args, **kwargs) def get_release(self, version): """Return a release from its version.""" - matches = [r for r in self.releases if "%s" % r.version == version] + matches = [r for r in self.releases if str(r.version) == version] if len(matches) != 1: raise KeyError(version) return matches[0] def get_versions(self): """Return a list of releases versions contained""" - return ["%s" % r.version for r in self.releases] + return [str(r.version) for r in self.releases] def __getitem__(self, key): return self.releases[key] @@ -532,7 +523,7 @@ # we dont get a good version number: recurse ! 
return eager_split(str, maxsplit - 1) else: - return (name, version) + return name, version if probable_name is not None: probable_name = probable_name.lower() name = None @@ -545,6 +536,6 @@ version = suggest_normalized_version(version) if version is not None and name != "": - return (name.lower(), version) + return name.lower(), version else: raise CantParseArchiveName(archive_name) diff --git a/distutils2/index/errors.py b/distutils2/pypi/errors.py rename from distutils2/index/errors.py rename to distutils2/pypi/errors.py --- a/distutils2/index/errors.py +++ b/distutils2/pypi/errors.py @@ -1,27 +1,25 @@ -"""distutils2.pypi.errors +"""Exceptions raised by distutils2.pypi code.""" -All errors and exceptions raised by PyPiIndex classes. -""" -from distutils2.errors import DistutilsIndexError +from distutils2.errors import PackagingPyPIError -class ProjectNotFound(DistutilsIndexError): +class ProjectNotFound(PackagingPyPIError): """Project has not been found""" -class DistributionNotFound(DistutilsIndexError): +class DistributionNotFound(PackagingPyPIError): """The release has not been found""" -class ReleaseNotFound(DistutilsIndexError): +class ReleaseNotFound(PackagingPyPIError): """The release has not been found""" -class CantParseArchiveName(DistutilsIndexError): +class CantParseArchiveName(PackagingPyPIError): """An archive name can't be parsed to find distribution name and version""" -class DownloadError(DistutilsIndexError): +class DownloadError(PackagingPyPIError): """An error has occurs while downloading""" @@ -29,13 +27,13 @@ """Compared hashes does not match""" -class UnsupportedHashName(DistutilsIndexError): +class UnsupportedHashName(PackagingPyPIError): """A unsupported hashname has been used""" -class UnableToDownload(DistutilsIndexError): +class UnableToDownload(PackagingPyPIError): """All mirrors have been tried, without success""" -class InvalidSearchField(DistutilsIndexError): +class InvalidSearchField(PackagingPyPIError): """An invalid search 
field has been used""" diff --git a/distutils2/index/mirrors.py b/distutils2/pypi/mirrors.py rename from distutils2/index/mirrors.py rename to distutils2/pypi/mirrors.py --- a/distutils2/index/mirrors.py +++ b/distutils2/pypi/mirrors.py @@ -1,6 +1,4 @@ -"""Utilities related to the mirror infrastructure defined in PEP 381. -See http://www.python.org/dev/peps/pep-0381/ -""" +"""Utilities related to the mirror infrastructure defined in PEP 381.""" from string import ascii_lowercase import socket @@ -12,7 +10,7 @@ """Return the list of mirrors from the last record found on the DNS entry:: - >>> from distutils2.index.mirrors import get_mirrors + >>> from distutils2.pypi.mirrors import get_mirrors >>> get_mirrors() ['a.pypi.python.org', 'b.pypi.python.org', 'c.pypi.python.org', 'd.pypi.python.org'] @@ -46,7 +44,7 @@ def product(*args, **kwds): - pools = map(tuple, args) * kwds.get('repeat', 1) + pools = [tuple(arg) for arg in args] * kwds.get('repeat', 1) result = [[]] for pool in pools: result = [x + [y] for x in result for y in pool] diff --git a/distutils2/index/simple.py b/distutils2/pypi/simple.py rename from distutils2/index/simple.py rename to distutils2/pypi/simple.py --- a/distutils2/index/simple.py +++ b/distutils2/pypi/simple.py @@ -1,10 +1,11 @@ -"""index.simple +"""Spider using the screen-scraping "simple" PyPI API. -Contains the class "SimpleIndexCrawler", a simple spider to find and retrieve -distributions on the Python Package Index, using its "simple" API, -avalaible at http://pypi.python.org/simple/ +This module contains the class Crawler, a simple spider that +can be used to find and retrieve distributions from a project index +(like the Python Package Index), using its so-called simple API (see +reference implementation available at http://pypi.python.org/simple/). 
""" -from fnmatch import translate + import httplib import re import socket @@ -13,17 +14,20 @@ import urlparse import os +from fnmatch import translate +from functools import wraps from distutils2 import logger -from distutils2.index.base import BaseClient -from distutils2.index.dist import (ReleasesList, EXTENSIONS, - get_infos_from_url, MD5_HASH) -from distutils2.index.errors import (DistutilsIndexError, DownloadError, - UnableToDownload, CantParseArchiveName, - ReleaseNotFound, ProjectNotFound) -from distutils2.index.mirrors import get_mirrors from distutils2.metadata import Metadata from distutils2.version import get_version_predicate -from distutils2 import __version__ as __distutils2_version__ +from distutils2 import __version__ as distutils2_version +from distutils2.pypi.base import BaseClient +from distutils2.pypi.dist import (ReleasesList, EXTENSIONS, + get_infos_from_url, MD5_HASH) +from distutils2.pypi.errors import (PackagingPyPIError, DownloadError, + UnableToDownload, CantParseArchiveName, + ReleaseNotFound, ProjectNotFound) +from distutils2.pypi.mirrors import get_mirrors +from distutils2.metadata import Metadata __all__ = ['Crawler', 'DEFAULT_SIMPLE_INDEX_URL'] @@ -32,7 +36,7 @@ DEFAULT_HOSTS = ("*",) SOCKET_TIMEOUT = 15 USER_AGENT = "Python-urllib/%s distutils2/%s" % ( - sys.version[:3], __distutils2_version__) + sys.version[:3], distutils2_version) # -- Regexps ------------------------------------------------- EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$') @@ -48,8 +52,9 @@ def socket_timeout(timeout=SOCKET_TIMEOUT): """Decorator to add a socket timeout when requesting pages on PyPI. 
""" - def _socket_timeout(func): - def _socket_timeout(self, *args, **kwargs): + def wrapper(func): + @wraps(func) + def wrapped(self, *args, **kwargs): old_timeout = socket.getdefaulttimeout() if hasattr(self, "_timeout"): timeout = self._timeout @@ -58,13 +63,14 @@ return func(self, *args, **kwargs) finally: socket.setdefaulttimeout(old_timeout) - return _socket_timeout - return _socket_timeout + return wrapped + return wrapper def with_mirror_support(): """Decorator that makes the mirroring support easier""" def wrapper(func): + @wraps(func) def wrapped(self, *args, **kwargs): try: return func(self, *args, **kwargs) @@ -103,7 +109,7 @@ :param follow_externals: tell if following external links is needed or not. Default is False. :param mirrors_url: the url to look on for DNS records giving mirror - adresses. + addresses. :param mirrors: a list of mirrors (see PEP 381). :param timeout: time in seconds to consider a url has timeouted. :param mirrors_max_tries": number of times to try requesting informations @@ -113,13 +119,20 @@ def __init__(self, index_url=DEFAULT_SIMPLE_INDEX_URL, prefer_final=False, prefer_source=True, hosts=DEFAULT_HOSTS, follow_externals=False, mirrors_url=None, mirrors=None, - timeout=SOCKET_TIMEOUT, mirrors_max_tries=0): + timeout=SOCKET_TIMEOUT, mirrors_max_tries=0, verbose=False): super(Crawler, self).__init__(prefer_final, prefer_source) self.follow_externals = follow_externals + self.verbose = verbose # mirroring attributes. - if not index_url.endswith("/"): - index_url += "/" + parsed = urlparse.urlparse(index_url) + self.scheme = parsed[0] + if self.scheme == 'file': + ender = os.path.sep + else: + ender = '/' + if not index_url.endswith(ender): + index_url += ender # if no mirrors are defined, use the method described in PEP 381. if mirrors is None: mirrors = get_mirrors(mirrors_url) @@ -145,33 +158,39 @@ Return a list of names. 
""" - index = self._open_url(self.index_url) - if '*' in name: - name.replace('*', '.*') - else: - name = "%s%s%s" % ('*.?', name, '*.?') - name = name.replace('*', '[^<]*') # avoid matching of the tag's end - projectname = re.compile("""]*>(%s)""" % name, flags=re.I) - matching_projects = [] - for match in projectname.finditer(index.read()): + with self._open_url(self.index_url) as index: + if '*' in name: + name.replace('*', '.*') + else: + name = "%s%s%s" % ('*.?', name, '*.?') + name = name.replace('*', '[^<]*') # avoid matching end tag + projectname = re.compile(']*>(%s)' % name, re.I) + matching_projects = [] + + index_content = index.read() + + # FIXME should use bytes I/O and regexes instead of decoding + index_content = index_content.decode() + + for match in projectname.finditer(index_content): project_name = match.group(1) matching_projects.append(self._get_project(project_name)) return matching_projects def get_releases(self, requirements, prefer_final=None, force_update=False): - """Search for releases and return a ReleaseList object containing + """Search for releases and return a ReleasesList object containing the results. """ predicate = get_version_predicate(requirements) if predicate.name.lower() in self._projects and not force_update: return self._projects.get(predicate.name.lower()) prefer_final = self._get_prefer_final(prefer_final) - logger.info('reading info on PyPI about %s', predicate.name) + logger.debug('Reading info on PyPI about %s', predicate.name) self._process_index_page(predicate.name) if predicate.name.lower() not in self._projects: - raise ProjectNotFound() + raise ProjectNotFound releases = self._projects.get(predicate.name.lower()) releases.sort_releases(prefer_final=prefer_final) @@ -199,10 +218,10 @@ Currently, download one archive, extract it and use the PKG-INFO file. 
""" release = self.get_distributions(project_name, version) - if not release._metadata: + if not release.metadata: location = release.get_distribution().unpack() pkg_info = os.path.join(location, 'PKG-INFO') - release._metadata = Metadata(pkg_info) + release.metadata = Metadata(pkg_info) return release def _switch_to_next_mirror(self): @@ -213,7 +232,8 @@ """ self._mirrors_used.add(self.index_url) index_url = self._mirrors.pop() - if not ("http://" or "https://" or "file://") in index_url: + # XXX use urlparse for a real check of missing scheme part + if not index_url.startswith(("http://", "https://", "file://")): index_url = "http://%s" % index_url if not index_url.endswith("/simple"): @@ -264,9 +284,8 @@ name = release.name else: name = release_info['name'] - if not name.lower() in self._projects: - self._projects[name.lower()] = ReleasesList(name, - index=self._index) + if name.lower() not in self._projects: + self._projects[name.lower()] = ReleasesList(name, index=self._index) if release: self._projects[name.lower()].add_release(release=release) @@ -292,27 +311,32 @@ method on it) """ f = self._open_url(url) - base_url = f.url - if url not in self._processed_urls: - self._processed_urls.append(url) - link_matcher = self._get_link_matcher(url) - for link, is_download in link_matcher(f.read(), base_url): - if link not in self._processed_urls: - if self._is_distribution(link) or is_download: - self._processed_urls.append(link) - # it's a distribution, so create a dist object - try: - infos = get_infos_from_url(link, project_name, - is_external=not self.index_url in url) - except CantParseArchiveName, e: - logger.warning( - "version has not been parsed: %s", e) + try: + base_url = f.url + if url not in self._processed_urls: + self._processed_urls.append(url) + link_matcher = self._get_link_matcher(url) + for link, is_download in link_matcher(f.read().decode(), base_url): + if link not in self._processed_urls: + if self._is_distribution(link) or is_download: + 
self._processed_urls.append(link) + # it's a distribution, so create a dist object + try: + infos = get_infos_from_url(link, project_name, + is_external=self.index_url not in url) + except CantParseArchiveName: + e = sys.exc_info()[1] + if self.verbose: + logger.warning( + "version has not been parsed: %s", e) + else: + self._register_release(release_info=infos) else: - self._register_release(release_info=infos) - else: - if self._is_browsable(link) and follow_links: - self._process_url(link, project_name, - follow_links=False) + if self._is_browsable(link) and follow_links: + self._process_url(link, project_name, + follow_links=False) + finally: + f.close() def _get_link_matcher(self, url): """Returns the right link matcher function of the given url @@ -331,6 +355,9 @@ This matches the simple index requirements for matching links. If follow_externals is set to False, dont yeld the external urls. + + :param content: the content of the page we want to parse + :param base_url: the url of this page. """ for match in HREF.finditer(content): url = self._get_full_url(match.group(1), base_url) @@ -340,7 +367,7 @@ for match in REL.finditer(content): # search for rel links. tag, rel = match.groups() - rels = map(str.strip, rel.lower().split(',')) + rels = [s.strip() for s in rel.lower().split(',')] if 'homepage' in rels or 'download' in rels: for match in HREF.finditer(tag): url = self._get_full_url(match.group(1), base_url) @@ -363,7 +390,11 @@ :param name: the name of the project to find the page """ # Browse and index the content of the given PyPI page. 
- url = self.index_url + name + "/" + if self.scheme == 'file': + ender = os.path.sep + else: + ender = '/' + url = self.index_url + name + ender self._process_url(url, name) @socket_timeout() @@ -376,19 +407,19 @@ # authentication stuff if scheme in ('http', 'https'): - auth, host = urllib2.splituser(netloc) + auth, host = urlparse.splituser(netloc) else: auth = None # add index.html automatically for filesystem paths if scheme == 'file': - if url.endswith('/'): + if url.endswith(os.path.sep): url += "index.html" # add authorization headers if auth is provided if auth: auth = "Basic " + \ - urllib2.unquote(auth).encode('base64').strip() + urlparse.unquote(auth).encode('base64').strip() new_url = urlparse.urlunparse(( scheme, host, path, params, query, frag)) request = urllib2.Request(new_url) @@ -398,17 +429,21 @@ request.add_header('User-Agent', USER_AGENT) try: fp = urllib2.urlopen(request) - except (ValueError, httplib.InvalidURL), v: + except (ValueError, httplib.InvalidURL): + v = sys.exc_info()[1] msg = ' '.join([str(arg) for arg in v.args]) - raise DistutilsIndexError('%s %s' % (url, msg)) - except urllib2.HTTPError, v: - return v - except urllib2.URLError, v: + raise PackagingPyPIError('%s %s' % (url, msg)) + except urllib2.HTTPError: + return sys.exc_info()[1] + except urllib2.URLError: + v = sys.exc_info()[1] raise DownloadError("Download error for %s: %s" % (url, v.reason)) - except httplib.BadStatusLine, v: + except httplib.BadStatusLine: + v = sys.exc_info()[1] raise DownloadError('%s returned a bad status line. 
' 'The server might be down, %s' % (url, v.line)) - except httplib.HTTPException, v: + except httplib.HTTPException: + v = sys.exc_info()[1] raise DownloadError("Download error for %s: %s" % (url, v)) except socket.timeout: raise DownloadError("The server timeouted") @@ -430,9 +465,9 @@ elif what.startswith('#'): what = int(what[1:]) else: - from htmlentitydefs import name2codepoint + from html.entities import name2codepoint what = name2codepoint.get(what, match.group(0)) - return unichr(what) + return chr(what) def _htmldecode(self, text): """Decode HTML entities in the given text.""" diff --git a/distutils2/index/wrapper.py b/distutils2/pypi/wrapper.py rename from distutils2/index/wrapper.py rename to distutils2/pypi/wrapper.py --- a/distutils2/index/wrapper.py +++ b/distutils2/pypi/wrapper.py @@ -1,4 +1,10 @@ -from distutils2.index import simple, xmlrpc +"""Convenient client for all PyPI APIs. + +This module provides a ClientWrapper class which will use the "simple" +or XML-RPC API to request information or files from an index. +""" + +from distutils2.pypi import simple, xmlrpc _WRAPPER_MAPPINGS = {'get_release': 'simple', 'get_releases': 'simple', @@ -19,14 +25,14 @@ exception = None methods = [func] for f in wrapper._indexes.values(): - if f != func.im_self and hasattr(f, func.__name__): + if f != func.__self__ and hasattr(f, func.__name__): methods.append(getattr(f, func.__name__)) for method in methods: try: response = method(*args, **kwargs) retry = False - except Exception, e: - exception = e + except Exception: + exception = sys.exc_info()[1] if not retry: break if retry and exception: @@ -43,7 +49,7 @@ mappings. If one of the indexes returns an error, tries to use others indexes. - :param index: tell wich index to rely on by default. + :param index: tell which index to rely on by default. :param index_classes: a dict of name:class to use as indexes. 
:param indexes: a dict of name:index already instantiated :param mappings: the mappings to use for this wrapper @@ -58,7 +64,7 @@ # instantiate the classes and set their _project attribute to the one # of the wrapper. - for name, cls in index_classes.iteritems(): + for name, cls in index_classes.items(): obj = self._indexes.setdefault(name, cls()) obj._projects = self._projects obj._index = self diff --git a/distutils2/index/xmlrpc.py b/distutils2/pypi/xmlrpc.py rename from distutils2/index/xmlrpc.py rename to distutils2/pypi/xmlrpc.py --- a/distutils2/index/xmlrpc.py +++ b/distutils2/pypi/xmlrpc.py @@ -1,12 +1,20 @@ -import logging -import xmlrpclib +"""Spider using the XML-RPC PyPI API. +This module contains the class Client, a spider that can be used to find +and retrieve distributions from a project index (like the Python Package +Index), using its XML-RPC API (see documentation of the reference +implementation at http://wiki.python.org/moin/PyPiXmlRpc). +""" + +import xmlrpclib, sys + +from distutils2 import logger from distutils2.errors import IrrationalVersionError -from distutils2.index.base import BaseClient -from distutils2.index.errors import (ProjectNotFound, InvalidSearchField, - ReleaseNotFound) -from distutils2.index.dist import ReleaseInfo from distutils2.version import get_version_predicate +from distutils2.pypi.base import BaseClient +from distutils2.pypi.errors import (ProjectNotFound, InvalidSearchField, + ReleaseNotFound) +from distutils2.pypi.dist import ReleaseInfo __all__ = ['Client', 'DEFAULT_XMLRPC_INDEX_URL'] @@ -23,11 +31,11 @@ If no server_url is specified, use the default PyPI XML-RPC URL, defined in the DEFAULT_XMLRPC_INDEX_URL constant:: - >>> client = XMLRPCClient() + >>> client = Client() >>> client.server_url == DEFAULT_XMLRPC_INDEX_URL True - >>> client = XMLRPCClient("http://someurl/") + >>> client = Client("http://someurl/") >>> client.server_url 'http://someurl/' """ @@ -46,8 +54,8 @@ predicate = 
get_version_predicate(requirements) releases = self.get_releases(predicate.name) release = releases.get_last(predicate, prefer_final) - self.get_metadata(release.name, "%s" % release.version) - self.get_distributions(release.name, "%s" % release.version) + self.get_metadata(release.name, str(release.version)) + self.get_distributions(release.name, str(release.version)) return release def get_releases(self, requirements, prefer_final=None, show_hidden=True, @@ -61,7 +69,7 @@ informations (eg. make a new XML-RPC call). :: - >>> client = XMLRPCClient() + >>> client = Client() >>> client.get_releases('Foo') ['1.1', '1.2', '1.3'] @@ -84,8 +92,7 @@ # list of releases that does not contains hidden ones all_versions = get_versions(project_name, show_hidden) existing_versions = project.get_versions() - hidden_versions = list(set(all_versions) - - set(existing_versions)) + hidden_versions = set(all_versions) - set(existing_versions) for version in hidden_versions: project.add_release(release=ReleaseInfo(project_name, version, index=self._index)) @@ -103,6 +110,7 @@ project.sort_releases(prefer_final) return project + def get_distributions(self, project_name, version): """Grab informations about distributions from XML-RPC. 
@@ -163,8 +171,9 @@ project.add_release(release=ReleaseInfo(p['name'], p['version'], metadata={'summary': p['summary']}, index=self._index)) - except IrrationalVersionError, e: - logging.warn("Irrational version error found: %s", e) + except IrrationalVersionError: + e = sys.exc_info()[1] + logger.warning("Irrational version error found: %s", e) return [self._projects[p['name'].lower()] for p in projects] def get_all_projects(self): @@ -181,7 +190,7 @@ If no server proxy is defined yet, creates a new one:: - >>> client = XmlRpcClient() + >>> client = Client() >>> client.proxy() diff --git a/distutils2/pysetup b/distutils2/pysetup deleted file mode 100755 --- a/distutils2/pysetup +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env python -from distutils2.run import main - -if __name__ == "__main__": - main() diff --git a/distutils2/resources.py b/distutils2/resources.py deleted file mode 100644 --- a/distutils2/resources.py +++ /dev/null @@ -1,25 +0,0 @@ -import os - -from distutils2.util import iglob - - -def _rel_path(base, path): - assert path.startswith(base) - return path[len(base):].lstrip('/') - - -def resources_dests(resources_root, rules): - """find destination of resources files""" - destinations = {} - for (base, suffix, dest) in rules: - prefix = os.path.join(resources_root, base) - for abs_base in iglob(prefix): - abs_glob = os.path.join(abs_base, suffix) - for abs_path in iglob(abs_glob): - resource_file = _rel_path(resources_root, abs_path) - if dest is None: # remove the entry if it was here - destinations.pop(resource_file, None) - else: - rel_path = _rel_path(abs_base, abs_path) - destinations[resource_file] = os.path.join(dest, rel_path) - return destinations diff --git a/distutils2/run.py b/distutils2/run.py --- a/distutils2/run.py +++ b/distutils2/run.py @@ -1,35 +1,26 @@ -"""distutils2.dispatcher +"""Main command line parser. Implements the pysetup script.""" -Parses the command line. 
-""" +import os +import re +import sys +import getopt import logging -import re -import os -import sys - -from distutils2.errors import DistutilsError, CCompilerError -from distutils2._backport.pkgutil import get_distributions, get_distribution -from distutils2.depgraph import generate_graph -from distutils2.install import install, remove -from distutils2.dist import Distribution - -from distutils2.command import get_command_class, STANDARD_COMMANDS - -from distutils2.errors import (DistutilsOptionError, DistutilsArgError, - DistutilsModuleError, DistutilsClassError) from distutils2 import logger +from distutils2.dist import Distribution +from distutils2.util import _is_archive_file, generate_setup_py +from distutils2.command import get_command_class, STANDARD_COMMANDS +from distutils2.install import install, install_local_project, remove +from distutils2.database import get_distribution, get_distributions +from distutils2.depgraph import generate_graph from distutils2.fancy_getopt import FancyGetopt +from distutils2.errors import (PackagingArgError, PackagingError, + PackagingModuleError, PackagingClassError, + CCompilerError) + command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$') -run_usage = """\ -usage: pysetup run [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...] - or: pysetup run --help - or: pysetup run --list-commands - or: pysetup run cmd --help -""" - common_usage = """\ Actions: %(actions)s @@ -39,77 +30,252 @@ pysetup action --help """ -global_options = [('verbose', 'v', "run verbosely (default)", 1), - ('quiet', 'q', "run quietly (turns verbosity off)"), - ('dry-run', 'n', "don't actually do anything"), - ('help', 'h', "show detailed help message"), - ('no-user-cfg', None, - 'ignore pydistutils.cfg in your home directory'), - ('version', None, 'Display the version'), - ] +create_usage = """\ +Usage: pysetup create + or: pysetup create --help + +Create a new Python project. 
+""" + +generate_usage = """\ +Usage: pysetup generate-setup + or: pysetup generate-setup --help + +Generate a setup.py script for backward-compatibility purposes. +""" + + +graph_usage = """\ +Usage: pysetup graph dist + or: pysetup graph --help + +Print dependency graph for the distribution. + +positional arguments: + dist installed distribution name +""" + +install_usage = """\ +Usage: pysetup install [dist] + or: pysetup install [archive] + or: pysetup install [src_dir] + or: pysetup install --help + +Install a Python distribution from the indexes, source directory, or sdist. + +positional arguments: + archive path to source distribution (zip, tar.gz) + dist distribution name to install from the indexes + scr_dir path to source directory + +""" + +metadata_usage = """\ +Usage: pysetup metadata [dist] [-f field ...] + or: pysetup metadata [dist] [--all] + or: pysetup metadata --help + +Print metadata for the distribution. + +positional arguments: + dist installed distribution name + +optional arguments: + -f metadata field to print + --all print all metadata fields +""" + +remove_usage = """\ +Usage: pysetup remove dist [-y] + or: pysetup remove --help + +Uninstall a Python distribution. + +positional arguments: + dist installed distribution name + +optional arguments: + -y auto confirm distribution removal +""" + +run_usage = """\ +Usage: pysetup run [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...] + or: pysetup run --help + or: pysetup run --list-commands + or: pysetup run cmd --help +""" + +list_usage = """\ +Usage: pysetup list dist [dist ...] + or: pysetup list --help + or: pysetup list --all + +Print name, version and location for the matching installed distributions. + +positional arguments: + dist installed distribution name + +optional arguments: + --all list all installed distributions +""" + +search_usage = """\ +Usage: pysetup search [project] [--simple [url]] [--xmlrpc [url] [--fieldname value ...] 
--operator or|and] + or: pysetup search --help + +Search the indexes for the matching projects. + +positional arguments: + project the project pattern to search for + +optional arguments: + --xmlrpc [url] wether to use the xmlrpc index or not. If an url is + specified, it will be used rather than the default one. + + --simple [url] wether to use the simple index or not. If an url is + specified, it will be used rather than the default one. + + --fieldname value Make a search on this field. Can only be used if + --xmlrpc has been selected or is the default index. + + --operator or|and Defines what is the operator to use when doing xmlrpc + searchs with multiple fieldnames. Can only be used if + --xmlrpc has been selected or is the default index. +""" + +global_options = [ + # The fourth entry for verbose means that it can be repeated. + ('verbose', 'v', "run verbosely (default)", True), + ('quiet', 'q', "run quietly (turns verbosity off)"), + ('dry-run', 'n', "don't actually do anything"), + ('help', 'h', "show detailed help message"), + ('no-user-cfg', None, 'ignore pydistutils.cfg in your home directory'), + ('version', None, 'Display the version'), +] + +negative_opt = {'quiet': 'verbose'} display_options = [ - ('help-commands', None, - "list all available commands"), - ] - + ('help-commands', None, "list all available commands"), +] display_option_names = [x[0].replace('-', '_') for x in display_options] -negative_opt = {'quiet': 'verbose'} -def _set_logger(): - logger.setLevel(logging.INFO) - sth = logging.StreamHandler(sys.stderr) - sth.setLevel(logging.INFO) - logger.addHandler(sth) - logger.propagate = 0 +def _parse_args(args, options, long_options): + """Transform sys.argv input into a dict. + + :param args: the args to parse (i.e sys.argv) + :param options: the list of options to pass to getopt + :param long_options: the list of string with the names of the long options + to be passed to getopt. 
+ + The function returns a dict with options/long_options as keys and matching + values as values. + """ + optlist, args = getopt.gnu_getopt(args, options, long_options) + optdict = {} + optdict['args'] = args + for k, v in optlist: + k = k.lstrip('-') + if k not in optdict: + optdict[k] = [] + if v: + optdict[k].append(v) + else: + optdict[k].append(v) + return optdict + + +class action_help(object): + """Prints a help message when the standard help flags: -h and --help + are used on the commandline. + """ + + def __init__(self, help_msg): + self.help_msg = help_msg + + def __call__(self, f): + def wrapper(*args, **kwargs): + f_args = args[1] + if '--help' in f_args or '-h' in f_args: + print(self.help_msg) + return + return f(*args, **kwargs) + return wrapper + + + at action_help(create_usage) +def _create(distpatcher, args, **kw): + from distutils2.create import main + return main() + + + at action_help(generate_usage) +def _generate(distpatcher, args, **kw): + generate_setup_py() + logger.info('The setup.py was generated') + + + at action_help(graph_usage) def _graph(dispatcher, args, **kw): - # XXX - dists = get_distributions(use_egg_info=True) - graph = generate_graph(dists) - print(graph) - return 0 - - - name = args[0] + name = args[1] dist = get_distribution(name, use_egg_info=True) if dist is None: - print('Distribution not found.') + logger.warning('Distribution not found.') + return 1 else: dists = get_distributions(use_egg_info=True) graph = generate_graph(dists) print(graph.repr_node(dist)) - return 0 + at action_help(install_usage) +def _install(dispatcher, args, **kw): + # first check if we are in a source directory + if len(args) < 2: + # are we inside a project dir? + if os.path.isfile('setup.cfg') or os.path.isfile('setup.py'): + args.insert(1, os.getcwd()) + else: + logger.warning('No project to install.') + return 1 + target = args[1] + # installing from a source dir or archive file? 
+ if os.path.isdir(target) or _is_archive_file(target): + return not install_local_project(target) + else: + # download from PyPI + return not install(target) -def _search(dispatcher, args, **kw): - search = args[0].lower() - for dist in get_distributions(use_egg_info=True): - name = dist.name.lower() - if search in name: - print('%s %s at %s' % (dist.name, dist.metadata['version'], - dist.path)) - return 0 + at action_help(metadata_usage) +def _metadata(dispatcher, args, **kw): + opts = _parse_args(args[1:], 'f:', ['all']) + if opts['args']: + name = opts['args'][0] + dist = get_distribution(name, use_egg_info=True) + if dist is None: + logger.warning('%r not installed', name) + return 1 + elif os.path.isfile('setup.cfg'): + logger.info('searching local dir for metadata') + dist = Distribution() # XXX use config module + dist.parse_config_files() + else: + logger.warning('no argument given and no local setup.cfg found') + return 1 - -def _metadata(dispatcher, args, **kw): - ### XXX Needs to work on any installed package as well - from distutils2.dist import Distribution - dist = Distribution() - dist.parse_config_files() metadata = dist.metadata - if 'all' in args: + if 'all' in opts: keys = metadata.keys() else: - keys = args - if len(keys) == 1: - print metadata[keys[0]] - return + if 'f' in opts: + keys = (k for k in opts['f'] if k in metadata) + else: + keys = () for key in keys: if key in metadata: @@ -117,26 +283,40 @@ value = metadata[key] if isinstance(value, list): for v in value: - print(' ' + v) + print(' ', v) else: - print(' ' + value.replace('\n', '\n ')) - return 0 + print(' ', value.replace('\n', '\n ')) + + at action_help(remove_usage) +def _remove(distpatcher, args, **kw): + opts = _parse_args(args[1:], 'y', []) + if 'y' in opts: + auto_confirm = True + else: + auto_confirm = False + + retcode = 0 + for dist in set(opts['args']): + try: + remove(dist, auto_confirm=auto_confirm) + except PackagingError: + logger.warning('%r not installed', dist) + 
retcode = 1 + + return retcode + + + at action_help(run_usage) def _run(dispatcher, args, **kw): parser = dispatcher.parser args = args[1:] commands = STANDARD_COMMANDS # + extra commands - # do we have a global option ? - if args in (['--help'], []): - print(run_usage) - return - if args == ['--list-commands']: print('List of available commands:') - cmds = list(commands) - cmds.sort() + cmds = sorted(commands) for cmd in cmds: cls = dispatcher.cmdclass.get(cmd) or get_command_class(cmd) @@ -160,53 +340,63 @@ # XXX still need to be extracted from Distribution dist.parse_config_files() - - try: - for cmd in dispatcher.commands: - dist.run_command(cmd, dispatcher.command_options[cmd]) - - except KeyboardInterrupt: - raise SystemExit("interrupted") - except (IOError, os.error, DistutilsError, CCompilerError), msg: - raise SystemExit("error: " + str(msg)) + for cmd in dispatcher.commands: + dist.run_command(cmd, dispatcher.command_options[cmd]) # XXX this is crappy return dist -def _install(dispatcher, args, **kw): - install(args[0]) - return 0 -def _remove(distpatcher, args, **kw): - remove(options.remove) - return 0 + at action_help(list_usage) +def _list(dispatcher, args, **kw): + opts = _parse_args(args[1:], '', ['all']) + dists = get_distributions(use_egg_info=True) + if 'all' in opts or opts['args'] == []: + results = dists + listall = True + else: + results = (d for d in dists if d.name.lower() in opts['args']) + listall = False -def _create(distpatcher, args, **kw): - from distutils2.mkcfg import main - main() - return 0 + number = 0 + for dist in results: + print('%r %s (from %r)' % (dist.name, dist.version, dist.path)) + number += 1 + if number == 0: + if listall: + logger.info('Nothing seems to be installed.') + else: + logger.warning('No matching distribution found.') + return 1 + else: + logger.info('Found %d projects installed.', number) -actions = [('run', 'Run one or several commands', _run), - ('metadata', 'Display the metadata of a project', 
_metadata), - ('install', 'Install a project', _install), - ('remove', 'Remove a project', _remove), - ('search', 'Search for a project', _search), - ('graph', 'Display a graph', _graph), - ('create', 'Create a Project', _create),] + at action_help(search_usage) +def _search(dispatcher, args, **kw): + """The search action. + It is able to search for a specific index (specified with --index), using + the simple or xmlrpc index types (with --type xmlrpc / --type simple) + """ + #opts = _parse_args(args[1:], '', ['simple', 'xmlrpc']) + # 1. what kind of index is requested ? (xmlrpc / simple) + logger.error('not implemented') + return 1 -def fix_help_options(options): - """Convert a 4-tuple 'help_options' list as found in various command - classes to the 3-tuple form required by FancyGetopt. - """ - new_options = [] - for help_tuple in options: - new_options.append(help_tuple[0:3]) - return new_options - +actions = [ + ('run', 'Run one or several commands', _run), + ('metadata', 'Display the metadata of a project', _metadata), + ('install', 'Install a project', _install), + ('remove', 'Remove a project', _remove), + ('search', 'Search for a project in the indexes', _search), + ('list', 'List installed releases', _list), + ('graph', 'Display a graph', _graph), + ('create', 'Create a project', _create), + ('generate-setup', 'Generate a backward-comptatible setup.py', _generate), +] class Dispatcher(object): @@ -214,22 +404,21 @@ """ def __init__(self, args=None): self.verbose = 1 - self.dry_run = 0 - self.help = 0 - self.script_name = 'pysetup' + self.dry_run = False + self.help = False self.cmdclass = {} self.commands = [] self.command_options = {} for attr in display_option_names: - setattr(self, attr, 0) + setattr(self, attr, False) self.parser = FancyGetopt(global_options + display_options) self.parser.set_negative_aliases(negative_opt) + # FIXME this parses everything, including command options (e.g. 
"run + # build -i" errors with "option -i not recognized") args = self.parser.getopt(args=args, object=self) - #args = args[1:] - # if first arg is "run", we have some commands if len(args) == 0: self.action = None @@ -239,31 +428,44 @@ allowed = [action[0] for action in actions] + [None] if self.action not in allowed: msg = 'Unrecognized action "%s"' % self.action - raise DistutilsArgError(msg) + raise PackagingArgError(msg) - # setting up the logger - handler = logging.StreamHandler() - logger.addHandler(handler) - - if self.verbose: - handler.setLevel(logging.DEBUG) - else: - handler.setLevel(logging.INFO) + self._set_logger() + self.args = args # for display options we return immediately - option_order = self.parser.get_option_order() - - self.args = args - if self.help or self.action is None: self._show_help(self.parser, display_options_=False) - return + + def _set_logger(self): + # setting up the logging level from the command-line options + # -q gets warning, error and critical + if self.verbose == 0: + level = logging.WARNING + # default level or -v gets info too + # XXX there's a bug somewhere: the help text says that -v is default + # (and verbose is set to 1 above), but when the user explicitly gives + # -v on the command line, self.verbose is incremented to 2! Here we + # compensate for that (I tested manually). On a related note, I think + # it's a good thing to use -q/nothing/-v/-vv on the command line + # instead of logging constants; it will be easy to add support for + # logging configuration in setup.cfg for advanced users. 
--merwok + elif self.verbose in (1, 2): + level = logging.INFO + else: # -vv and more for debug + level = logging.DEBUG + + # setting up the stream handler + handler = logging.StreamHandler(sys.stderr) + handler.setLevel(level) + logger.addHandler(handler) + logger.setLevel(level) def _parse_command_opts(self, parser, args): # Pull the current command from the head of the command line command = args[0] if not command_re.match(command): - raise SystemExit("invalid command name %r" % command) + raise SystemExit("invalid command name %r" % (command,)) self.commands.append(command) # Dig up the command class that implements this command, so we @@ -271,24 +473,24 @@ # it takes. try: cmd_class = get_command_class(command) - except DistutilsModuleError, msg: - raise DistutilsArgError(msg) + except PackagingModuleError: + raise PackagingArgError(sys.exc_info()[1]) - # XXX We want to push this in distutils.command + # XXX We want to push this in distutils2.command # # Require that the command class be derived from Command -- want # to be sure that the basic "command" interface is implemented. for meth in ('initialize_options', 'finalize_options', 'run'): if hasattr(cmd_class, meth): continue - raise DistutilsClassError( + raise PackagingClassError( 'command %r must implement %r' % (cmd_class, meth)) # Also make sure that the command object provides a list of its # known options. if not (hasattr(cmd_class, 'user_options') and isinstance(cmd_class.user_options, list)): - raise DistutilsClassError( + raise PackagingClassError( "command class %s must provide " "'user_options' attribute (a list of tuples)" % cmd_class) @@ -303,7 +505,7 @@ # format (tuple of four) so we need to preprocess them here. 
if (hasattr(cmd_class, 'help_options') and isinstance(cmd_class.help_options, list)): - help_options = fix_help_options(cmd_class.help_options) + help_options = cmd_class.help_options[:] else: help_options = [] @@ -321,14 +523,14 @@ if (hasattr(cmd_class, 'help_options') and isinstance(cmd_class.help_options, list)): - help_option_found = 0 - for (help_option, short, desc, func) in cmd_class.help_options: + help_option_found = False + for help_option, short, desc, func in cmd_class.help_options: if hasattr(opts, help_option.replace('-', '_')): - help_option_found = 1 + help_option_found = True if hasattr(func, '__call__'): func() else: - raise DistutilsClassError( + raise PackagingClassError( "invalid help function %r for help option %r: " "must be a callable object (function, etc.)" % (func, help_option)) @@ -339,7 +541,7 @@ # Put the options from the command line into their official # holding pen, the 'command_options' dictionary. opt_dict = self.get_option_dict(command) - for (name, value) in vars(opts).iteritems(): + for name, value in vars(opts).items(): opt_dict[name] = ("command line", value) return args @@ -366,23 +568,23 @@ parser.print_help(usage + "\nGlobal options:") - def _show_help(self, parser, global_options_=1, display_options_=1, + def _show_help(self, parser, global_options_=True, display_options_=True, commands=[]): # late import because of mutual dependence between these modules from distutils2.command.cmd import Command print('Usage: pysetup [options] action [action_options]') - print('') + print() if global_options_: self.print_usage(self.parser) - print('') + print() if display_options_: parser.set_option_table(display_options) parser.print_help( "Information display options (just display " + "information, ignore any commands)") - print('') + print() for command in commands: if isinstance(command, type) and issubclass(command, Command): @@ -391,35 +593,30 @@ cls = get_command_class(command) if (hasattr(cls, 'help_options') and 
isinstance(cls.help_options, list)): - parser.set_option_table(cls.user_options + - fix_help_options(cls.help_options)) + parser.set_option_table(cls.user_options + cls.help_options) else: parser.set_option_table(cls.user_options) - parser.print_help("Options for %r command:" % cls.__name__) - print('') + print() def _show_command_help(self, command): - from distutils2.command.cmd import Command - if isinstance(command, str): + if isinstance(command, basestring): command = get_command_class(command) - name = command.get_command_name() - desc = getattr(command, 'description', '(no description available)') - print('Description: %s' % desc) - print('') + print('Description:', desc) + print() if (hasattr(command, 'help_options') and isinstance(command.help_options, list)): self.parser.set_option_table(command.user_options + - fix_help_options(command.help_options)) + command.help_options) else: self.parser.set_option_table(command.user_options) self.parser.print_help("Options:") - print('') + print() def _get_command_groups(self): """Helper function to retrieve all the command class names divided @@ -440,21 +637,16 @@ 'description'. """ std_commands, extra_commands = self._get_command_groups() - max_length = 0 - for cmd in list(std_commands) + list(extra_commands): - if len(cmd) > max_length: - max_length = len(cmd) + max_length = max(len(command) + for commands in (std_commands, extra_commands) + for command in commands) - self.print_command_list(std_commands, - "Standard commands", - max_length) + self.print_command_list(std_commands, "Standard commands", max_length) if extra_commands: - print - self.print_command_list(extra_commands, - "Extra commands", + print() + self.print_command_list(extra_commands, "Extra commands", max_length) - def print_command_list(self, commands, header, max_length): """Print a subset of the list of all commands -- used by 'print_commands()'. 
@@ -468,10 +660,10 @@ print(" %-*s %s" % (max_length, cmd, description)) - def __call__(self): if self.action is None: - return 0 + return + for action, desc, func in actions: if action == self.action: return func(self, self.args) @@ -479,11 +671,23 @@ def main(args=None): - dispatcher = Dispatcher(args) - if dispatcher.action is None: - return 0 + old_level = logger.level + old_handlers = list(logger.handlers) + try: + dispatcher = Dispatcher(args) + if dispatcher.action is None: + return + return dispatcher() + except KeyboardInterrupt: + logger.info('interrupted') + return 1 + except (IOError, os.error, PackagingError, CCompilerError): + logger.exception(sys.exc_info()[1]) + return 1 + finally: + logger.setLevel(old_level) + logger.handlers[:] = old_handlers - return dispatcher() if __name__ == '__main__': sys.exit(main()) diff --git a/distutils2/tests/__init__.py b/distutils2/tests/__init__.py --- a/distutils2/tests/__init__.py +++ b/distutils2/tests/__init__.py @@ -5,39 +5,23 @@ 'test' and contains a function test_suite(). The function is expected to return an initialized unittest.TestSuite instance. -Tests for the command classes in the distutils2.command package are -included in distutils2.tests as well, instead of using a separate -distutils2.command.tests package, since command identification is done -by import rather than matching pre-defined names. +Utility code is included in distutils2.tests.support. +""" -Always import unittest from this module, it will be the right version -(standard library unittest for 3.2 and higher, third-party unittest2 -release for older versions). - -Utility code is included in distutils2.tests.support. -""" +# Put this text back for the backport +#Always import unittest from this module, it will be the right version +#(standard library unittest for 3.2 and higher, third-party unittest2 +#elease for older versions). 
import os import sys +import unittest2 as unittest +from .support import TESTFN -if sys.version_info >= (3, 2): - # improved unittest package from 3.2's standard library - import unittest -else: - try: - # external release of same package for older versions - import unittest2 as unittest - except ImportError: - sys.exit('Error: You have to install unittest2') - -# use TESTFN from stdlib, pull in unlink for other modules to use as well -if sys.version_info[0] == 3: - from test.support import TESTFN, unlink -else : - from test.test_support import TESTFN, unlink +# XXX move helpers to support, add tests for them, remove things that +# duplicate test.support (or keep them for the backport; needs thinking) here = os.path.dirname(__file__) or os.curdir - verbose = 1 def test_suite(): @@ -50,6 +34,7 @@ suite.addTest(module.test_suite()) return suite + class Error(Exception): """Base class for regression test exceptions.""" @@ -88,12 +73,13 @@ def run_unittest(classes, verbose_=1): """Run tests from unittest.TestCase-derived classes. - Extracted from stdlib test.test_support and modified to support unittest. + Originally extracted from stdlib test.test_support and modified to + support unittest2. """ valid_types = (unittest.TestSuite, unittest.TestCase) suite = unittest.TestSuite() for cls in classes: - if isinstance(cls, str): + if isinstance(cls, basestring): if cls in sys.modules: suite.addTest(unittest.findTestCases(sys.modules[cls])) else: @@ -111,7 +97,7 @@ stick around to hog resources and create problems when looking for refleaks. - Extracted from stdlib test.test_support. + Extracted from stdlib test.support. """ # Reap all our dead child processes so we don't leave zombies around. 
@@ -127,10 +113,11 @@ except: break + def captured_stdout(func, *args, **kw): - import StringIO + from StringIO import StringIO orig_stdout = getattr(sys, 'stdout') - setattr(sys, 'stdout', StringIO.StringIO()) + setattr(sys, 'stdout', StringIO()) try: res = func(*args, **kw) sys.stdout.seek(0) @@ -138,12 +125,9 @@ finally: setattr(sys, 'stdout', orig_stdout) + def unload(name): try: del sys.modules[name] except KeyError: pass - - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/distutils2/tests/__main__.py b/distutils2/tests/__main__.py new file mode 100644 --- /dev/null +++ b/distutils2/tests/__main__.py @@ -0,0 +1,23 @@ +"""Packaging test suite runner.""" + +# Ripped from importlib tests, thanks Brett! + +import os +import sys +import unittest2 +from .support import run_unittest, reap_children, reap_threads + + + at reap_threads +def test_main(): + try: + start_dir = os.path.dirname(__file__) + top_dir = os.path.dirname(os.path.dirname(start_dir)) + test_loader = unittest2.TestLoader() + run_unittest(test_loader.discover(start_dir, top_level_dir=top_dir)) + finally: + reap_children() + + +if __name__ == '__main__': + test_main() diff --git a/distutils2/tests/fake_dists/babar-0.1.dist-info/INSTALLER b/distutils2/tests/fake_dists/babar-0.1.dist-info/INSTALLER new file mode 100644 diff --git a/distutils2/tests/fake_dists/babar-0.1.dist-info/METADATA b/distutils2/tests/fake_dists/babar-0.1.dist-info/METADATA new file mode 100644 --- /dev/null +++ b/distutils2/tests/fake_dists/babar-0.1.dist-info/METADATA @@ -0,0 +1,4 @@ +Metadata-version: 1.2 +Name: babar +Version: 0.1 +Author: FELD Boris \ No newline at end of file diff --git a/distutils2/tests/fake_dists/babar-0.1.dist-info/RECORD b/distutils2/tests/fake_dists/babar-0.1.dist-info/RECORD new file mode 100644 diff --git a/distutils2/tests/fake_dists/babar-0.1.dist-info/REQUESTED b/distutils2/tests/fake_dists/babar-0.1.dist-info/REQUESTED new file mode 100644 diff --git 
a/distutils2/tests/fake_dists/babar-0.1.dist-info/RESOURCES b/distutils2/tests/fake_dists/babar-0.1.dist-info/RESOURCES new file mode 100644 --- /dev/null +++ b/distutils2/tests/fake_dists/babar-0.1.dist-info/RESOURCES @@ -0,0 +1,2 @@ +babar.png,babar.png +babar.cfg,babar.cfg \ No newline at end of file diff --git a/distutils2/tests/fake_dists/babar.cfg b/distutils2/tests/fake_dists/babar.cfg new file mode 100644 --- /dev/null +++ b/distutils2/tests/fake_dists/babar.cfg @@ -0,0 +1,1 @@ +Config \ No newline at end of file diff --git a/distutils2/tests/fake_dists/babar.png b/distutils2/tests/fake_dists/babar.png new file mode 100644 diff --git a/distutils2/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO b/distutils2/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO new file mode 100644 --- /dev/null +++ b/distutils2/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO @@ -0,0 +1,6 @@ +Metadata-Version: 1.2 +Name: bacon +Version: 0.1 +Provides-Dist: truffles (2.0) +Provides-Dist: bacon (0.1) +Obsoletes-Dist: truffles (>=0.9,<=1.5) diff --git a/distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/PKG-INFO b/distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/PKG-INFO new file mode 100644 --- /dev/null +++ b/distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/PKG-INFO @@ -0,0 +1,18 @@ +Metadata-Version: 1.0 +Name: banana +Version: 0.4 +Summary: A yellow fruit +Home-page: http://en.wikipedia.org/wiki/Banana +Author: Josip Djolonga +Author-email: foo at nbar.com +License: BSD +Description: A fruit +Keywords: foo bar +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Scientific/Engineering :: GIS diff --git a/distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/SOURCES.txt 
b/distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/SOURCES.txt new file mode 100644 diff --git a/distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/dependency_links.txt b/distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/dependency_links.txt new file mode 100644 --- /dev/null +++ b/distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/dependency_links.txt @@ -0,0 +1,1 @@ + diff --git a/distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/entry_points.txt b/distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/entry_points.txt new file mode 100644 --- /dev/null +++ b/distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/entry_points.txt @@ -0,0 +1,3 @@ + + # -*- Entry points: -*- + \ No newline at end of file diff --git a/distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/not-zip-safe b/distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/not-zip-safe new file mode 100644 --- /dev/null +++ b/distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/not-zip-safe @@ -0,0 +1,1 @@ + diff --git a/distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/requires.txt b/distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/requires.txt new file mode 100644 --- /dev/null +++ b/distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/requires.txt @@ -0,0 +1,6 @@ +# this should be ignored + +strawberry >=0.5 + +[section ignored] +foo ==0.5 diff --git a/distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/top_level.txt b/distutils2/tests/fake_dists/banana-0.4.egg/EGG-INFO/top_level.txt new file mode 100644 diff --git a/distutils2/tests/fake_dists/cheese-2.0.2.egg-info b/distutils2/tests/fake_dists/cheese-2.0.2.egg-info new file mode 100644 --- /dev/null +++ b/distutils2/tests/fake_dists/cheese-2.0.2.egg-info @@ -0,0 +1,5 @@ +Metadata-Version: 1.2 +Name: cheese +Version: 2.0.2 +Provides-Dist: truffles (1.0.2) +Obsoletes-Dist: truffles (!=1.2,<=2.0) diff --git a/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/INSTALLER b/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/INSTALLER new file 
mode 100644 diff --git a/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA b/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA new file mode 100644 --- /dev/null +++ b/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA @@ -0,0 +1,9 @@ +Metadata-Version: 1.2 +Name: choxie +Version: 2.0.0.9 +Summary: Chocolate with a kick! +Requires-Dist: towel-stuff (0.1) +Requires-Dist: nut +Provides-Dist: truffles (1.0) +Obsoletes-Dist: truffles (<=0.8,>=0.5) +Obsoletes-Dist: truffles (<=0.9,>=0.6) diff --git a/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/RECORD b/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/RECORD new file mode 100644 diff --git a/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/REQUESTED b/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/REQUESTED new file mode 100644 diff --git a/distutils2/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py b/distutils2/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py new file mode 100644 --- /dev/null +++ b/distutils2/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py @@ -0,0 +1,1 @@ +# -*- coding: utf-8 -*- diff --git a/distutils2/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py b/distutils2/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py new file mode 100644 --- /dev/null +++ b/distutils2/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- +from towel_stuff import Towel + +class Chocolate(object): + """A piece of chocolate.""" + + def wrap_with_towel(self): + towel = Towel() + towel.wrap(self) + return towel diff --git a/distutils2/tests/fake_dists/choxie-2.0.0.9/truffles.py b/distutils2/tests/fake_dists/choxie-2.0.0.9/truffles.py new file mode 100644 --- /dev/null +++ b/distutils2/tests/fake_dists/choxie-2.0.0.9/truffles.py @@ -0,0 +1,5 @@ +# -*- coding: utf-8 -*- +from choxie.chocolate import Chocolate + +class Truffle(Chocolate): + """A truffle.""" diff --git 
a/distutils2/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO b/distutils2/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO new file mode 100644 --- /dev/null +++ b/distutils2/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO @@ -0,0 +1,5 @@ +Metadata-Version: 1.2 +Name: coconuts-aster +Version: 10.3 +Provides-Dist: strawberry (0.6) +Provides-Dist: banana (0.4) diff --git a/distutils2/tests/fake_dists/grammar-1.0a4.dist-info/INSTALLER b/distutils2/tests/fake_dists/grammar-1.0a4.dist-info/INSTALLER new file mode 100644 diff --git a/distutils2/tests/fake_dists/grammar-1.0a4.dist-info/METADATA b/distutils2/tests/fake_dists/grammar-1.0a4.dist-info/METADATA new file mode 100644 --- /dev/null +++ b/distutils2/tests/fake_dists/grammar-1.0a4.dist-info/METADATA @@ -0,0 +1,5 @@ +Metadata-Version: 1.2 +Name: grammar +Version: 1.0a4 +Requires-Dist: truffles (>=1.2) +Author: Sherlock Holmes diff --git a/distutils2/tests/fake_dists/grammar-1.0a4.dist-info/RECORD b/distutils2/tests/fake_dists/grammar-1.0a4.dist-info/RECORD new file mode 100644 diff --git a/distutils2/tests/fake_dists/grammar-1.0a4.dist-info/REQUESTED b/distutils2/tests/fake_dists/grammar-1.0a4.dist-info/REQUESTED new file mode 100644 diff --git a/distutils2/tests/fake_dists/grammar-1.0a4/grammar/__init__.py b/distutils2/tests/fake_dists/grammar-1.0a4/grammar/__init__.py new file mode 100644 --- /dev/null +++ b/distutils2/tests/fake_dists/grammar-1.0a4/grammar/__init__.py @@ -0,0 +1,1 @@ +# -*- coding: utf-8 -*- diff --git a/distutils2/tests/fake_dists/grammar-1.0a4/grammar/utils.py b/distutils2/tests/fake_dists/grammar-1.0a4/grammar/utils.py new file mode 100644 --- /dev/null +++ b/distutils2/tests/fake_dists/grammar-1.0a4/grammar/utils.py @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +from random import randint + +def is_valid_grammar(sentence): + if randint(0, 10) < 2: + return False + else: + return True diff --git a/distutils2/tests/fake_dists/nut-funkyversion.egg-info 
b/distutils2/tests/fake_dists/nut-funkyversion.egg-info new file mode 100644 --- /dev/null +++ b/distutils2/tests/fake_dists/nut-funkyversion.egg-info @@ -0,0 +1,3 @@ +Metadata-Version: 1.2 +Name: nut +Version: funkyversion diff --git a/distutils2/tests/fake_dists/strawberry-0.6.egg b/distutils2/tests/fake_dists/strawberry-0.6.egg new file mode 100644 index 0000000000000000000000000000000000000000..6d160e8b161031ae52638514843592187925b757 GIT binary patch literal 1402 zc$`yK)KALH(=X28%1l#;R!B%nEKbc!%uQ8LF-TCbRZuEUEh#N1$r9UQWv|0ty0Ufu2)H%gjmDgJ}xL0otCbPy`8 at OrXVy z$=M1e`3iV~M*-+)g_5F5g~as4%sjABpm0h{1UV)xlPkcRnT3l11j?rGw_!j6Vhl12 zuI}!-o_=or`X%`V at j0nwsX2Nj6(yk|oD9qiubF*7xU_i9RnLxBqqeU~_GC)B*8Q%^m+PITr6Z&8t31F+o4>xK^{d-p7?^ zx~J#&ZkWuS9>> server.fulladress() + >>> server.fulladdress() "http://ip:port/" It could be simple to have one HTTP server, relaying the requests to the two implementations (static HTTP and XMLRPC over HTTP). """ -import os.path +import os +import queue import select -import socket import threading - -# several packages had different names in Python 2.x -try: - import queue - import socketserver - from http.server import HTTPServer, SimpleHTTPRequestHandler - from xmlrpc.server import SimpleXMLRPCServer -except ImportError: - import Queue as queue - import SocketServer as socketserver - from BaseHTTPServer import HTTPServer - from SimpleHTTPServer import SimpleHTTPRequestHandler - from SimpleXMLRPCServer import SimpleXMLRPCServer +import socketserver +from functools import wraps +from http.server import HTTPServer, SimpleHTTPRequestHandler +from xmlrpc.server import SimpleXMLRPCServer from distutils2.tests import unittest -PYPI_DEFAULT_STATIC_PATH = os.path.dirname(os.path.abspath(__file__)) + "/pypiserver" +PYPI_DEFAULT_STATIC_PATH = os.path.join( + os.path.dirname(os.path.abspath(__file__)), 'pypiserver') + def use_xmlrpc_server(*server_args, **server_kwargs): server_kwargs['serve_xmlrpc'] = True return use_pypi_server(*server_args, 
**server_kwargs) + def use_http_server(*server_args, **server_kwargs): server_kwargs['serve_xmlrpc'] = False return use_pypi_server(*server_args, **server_kwargs) + def use_pypi_server(*server_args, **server_kwargs): """Decorator to make use of the PyPIServer for test methods, just when needed, and not for the entire duration of the testcase. """ def wrapper(func): + @wraps(func) def wrapped(*args, **kwargs): server = PyPIServer(*server_args, **server_kwargs) server.start() @@ -74,16 +70,15 @@ return wrapped return wrapper + class PyPIServerTestCase(unittest.TestCase): def setUp(self): super(PyPIServerTestCase, self).setUp() self.pypi = PyPIServer() self.pypi.start() + self.addCleanup(self.pypi.stop) - def tearDown(self): - super(PyPIServerTestCase, self).tearDown() - self.pypi.stop() class PyPIServer(threading.Thread): """PyPI Mocked server. @@ -93,8 +88,8 @@ """ def __init__(self, test_static_path=None, - static_filesystem_paths=["default"], - static_uri_paths=["simple"], serve_xmlrpc=False) : + static_filesystem_paths=None, + static_uri_paths=["simple", "packages"], serve_xmlrpc=False): """Initialize the server. Default behavior is to start the HTTP server. You can either start the @@ -110,6 +105,8 @@ threading.Thread.__init__(self) self._run = True self._serve_xmlrpc = serve_xmlrpc + if static_filesystem_paths is None: + static_filesystem_paths = ["default"] #TODO allow to serve XMLRPC and HTTP static files at the same time. 
if not self._serve_xmlrpc: @@ -118,15 +115,18 @@ self.request_queue = queue.Queue() self._requests = [] - self.default_response_status = 200 + self.default_response_status = 404 self.default_response_headers = [('Content-type', 'text/plain')] - self.default_response_data = "hello" + self.default_response_data = "The page does not exists" # initialize static paths / filesystems self.static_uri_paths = static_uri_paths + + # append the static paths defined locally if test_static_path is not None: static_filesystem_paths.append(test_static_path) - self.static_filesystem_paths = [PYPI_DEFAULT_STATIC_PATH + "/" + path + self.static_filesystem_paths = [ + PYPI_DEFAULT_STATIC_PATH + "/" + path for path in static_filesystem_paths] else: # XMLRPC server @@ -136,7 +136,7 @@ self.server.register_introspection_functions() self.server.register_instance(self.xmlrpc) - self.address = (self.server.server_name, self.server.server_port) + self.address = ('127.0.0.1', self.server.server_port) # to not have unwanted outputs. self.server.RequestHandlerClass.log_request = lambda *_: None @@ -150,6 +150,9 @@ def stop(self): """self shutdown is not supported for python < 2.6""" self._run = False + if self.is_alive(): + self.join() + self.server.server_close() def get_next_response(self): return (self.default_response_status, @@ -177,29 +180,23 @@ # we need to access the pypi server while serving the content pypi_server = None - def do_POST(self): - return self.serve_request() - def do_GET(self): - return self.serve_request() - def do_DELETE(self): - return self.serve_request() - def do_PUT(self): - return self.serve_request() - def serve_request(self): """Serve the content. Also record the requests to be accessed later. If trying to access an url matching a static uri, serve static content, otherwise serve what is provided by the `get_next_response` method. + + If nothing is defined there, return a 404 header. """ # record the request. 
Read the input only on PUT or POST requests if self.command in ("PUT", "POST"): - if 'content-length' in self.headers.dict: + if 'content-length' in self.headers: request_data = self.rfile.read( int(self.headers['content-length'])) else: request_data = self.rfile.read() + elif self.command in ("GET", "DELETE"): request_data = '' @@ -220,13 +217,19 @@ try: if self.path.endswith("/"): relative_path += "index.html" - file = open(fs_path + relative_path) - data = file.read() + if relative_path.endswith('.tar.gz'): - headers=[('Content-type', 'application/x-gtar')] + with open(fs_path + relative_path, 'br') as file: + data = file.read() + headers = [('Content-type', 'application/x-gtar')] else: - headers=[('Content-type', 'text/html')] + with open(fs_path + relative_path) as file: + data = file.read().encode() + headers = [('Content-type', 'text/html')] + + headers.append(('Content-Length', len(data))) self.make_response(data, headers=headers) + except IOError: pass @@ -239,6 +242,8 @@ status, headers, data = self.pypi_server.get_next_response() self.make_response(data, status, headers) + do_POST = do_GET = do_DELETE = do_PUT = serve_request + def make_response(self, data, status=200, headers=[('Content-type', 'text/html')]): """Send the response to the HTTP client""" @@ -254,18 +259,24 @@ for header, value in headers: self.send_header(header, value) self.end_headers() + + if type(data) is str: + data = data.encode() + self.wfile.write(data) + class PyPIXMLRPCServer(SimpleXMLRPCServer): def server_bind(self): """Override server_bind to store the server name.""" socketserver.TCPServer.server_bind(self) host, port = self.socket.getsockname()[:2] - self.server_name = socket.getfqdn(host) self.server_port = port + class MockDist(object): """Fake distribution, used in the Mock PyPI Server""" + def __init__(self, name, version="1.0", hidden=False, url="http://url/", type="sdist", filename="", size=10000, digest="123456", downloads=7, has_sig=False, @@ -377,6 +388,7 @@ 
'summary': self.summary, } + class XMLRPCMockIndex(object): """Mock XMLRPC server""" @@ -386,7 +398,7 @@ def add_distributions(self, dists): for dist in dists: - self._dists.append(MockDist(**dist)) + self._dists.append(MockDist(**dist)) def set_distributions(self, dists): self._dists = [] diff --git a/distutils2/tests/pypi_test_server.py b/distutils2/tests/pypi_test_server.py new file mode 100644 --- /dev/null +++ b/distutils2/tests/pypi_test_server.py @@ -0,0 +1,59 @@ +"""Test PyPI Server implementation at testpypi.python.org, to use in tests. + +This is a drop-in replacement for the mock pypi server for testing against a +real pypi server hosted by python.org especially for testing against. +""" + +import unittest + +PYPI_DEFAULT_STATIC_PATH = None + + +def use_xmlrpc_server(*server_args, **server_kwargs): + server_kwargs['serve_xmlrpc'] = True + return use_pypi_server(*server_args, **server_kwargs) + + +def use_http_server(*server_args, **server_kwargs): + server_kwargs['serve_xmlrpc'] = False + return use_pypi_server(*server_args, **server_kwargs) + + +def use_pypi_server(*server_args, **server_kwargs): + """Decorator to make use of the PyPIServer for test methods, + just when needed, and not for the entire duration of the testcase. 
+ """ + def wrapper(func): + def wrapped(*args, **kwargs): + server = PyPIServer(*server_args, **server_kwargs) + func(server=server, *args, **kwargs) + return wrapped + return wrapper + + +class PyPIServerTestCase(unittest.TestCase): + + def setUp(self): + super(PyPIServerTestCase, self).setUp() + self.pypi = PyPIServer() + self.pypi.start() + self.addCleanup(self.pypi.stop) + + +class PyPIServer(object): + """Shim to access testpypi.python.org, for testing a real server.""" + + def __init__(self, test_static_path=None, + static_filesystem_paths=["default"], + static_uri_paths=["simple"], serve_xmlrpc=False): + self.address = ('testpypi.python.org', '80') + + def start(self): + pass + + def stop(self): + pass + + @property + def full_address(self): + return "http://%s:%s" % self.address diff --git a/distutils2/tests/pypiserver/downloads_with_md5/packages/source/f/foobar/foobar-0.1.tar.gz b/distutils2/tests/pypiserver/downloads_with_md5/packages/source/f/foobar/foobar-0.1.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..333961eb18a6e7db80fefd41c339ab218d5180c4 GIT binary patch literal 110 zc$|~(=3uy!>FUeC{PvtR-ysJc)&sVu?9yZ7`(A1Di)P(6s!I71JWZ;--fWND`LA)=lAmk-7Jbj=XMlnFEsQ#U Kd|Vkc7#IK&xGYxy diff --git a/distutils2/tests/pypiserver/downloads_with_md5/simple/foobar/foobar-0.1.tar.gz b/distutils2/tests/pypiserver/downloads_with_md5/simple/foobar/foobar-0.1.tar.gz deleted file mode 100644 Binary file distutils2/tests/pypiserver/downloads_with_md5/simple/foobar/foobar-0.1.tar.gz has changed diff --git a/distutils2/tests/pypiserver/downloads_with_md5/simple/foobar/index.html b/distutils2/tests/pypiserver/downloads_with_md5/simple/foobar/index.html --- a/distutils2/tests/pypiserver/downloads_with_md5/simple/foobar/index.html +++ b/distutils2/tests/pypiserver/downloads_with_md5/simple/foobar/index.html @@ -1,3 +1,3 @@ -foobar-0.1.tar.gz
      +foobar-0.1.tar.gz
      diff --git a/distutils2/tests/support.py b/distutils2/tests/support.py --- a/distutils2/tests/support.py +++ b/distutils2/tests/support.py @@ -1,8 +1,7 @@ """Support code for distutils2 test cases. -Four helper classes are provided: LoggingCatcher, TempdirManager, -EnvironGuard and WarningsCatcher. They are written to be used as mixins, -e.g. :: +A few helper classes are provided: LoggingCatcher, TempdirManager and +EnvironRestorer. They are written to be used as mixins:: from distutils2.tests import unittest from distutils2.tests.support import LoggingCatcher @@ -21,83 +20,112 @@ tests of another command that needs them, a create_distribution function and a skip_unless_symlink decorator. +Also provided is a DummyCommand class, useful to mock commands in the +tests of another command that needs them, a create_distribution function +and a skip_unless_symlink decorator. + Each class or function has a docstring to explain its purpose and usage. """ +import codecs import os import shutil +import logging +import logging.handlers +import subprocess +import sys +import weakref import tempfile -import warnings -from copy import deepcopy -import logging +try: + import _thread, threading +except ImportError: + _thread = None + threading = None +try: + import zlib +except ImportError: + zlib = None -from distutils2 import logger from distutils2.dist import Distribution from distutils2.tests import unittest -__all__ = ['LoggingCatcher', 'WarningsCatcher', 'TempdirManager', - 'EnvironGuard', 'DummyCommand', 'unittest', 'create_distribution', - 'skip_unless_symlink'] +__all__ = ['LoggingCatcher', 'TempdirManager', 'EnvironRestorer', + 'DummyCommand', 'unittest', 'create_distribution', + 'skip_unless_symlink', 'requires_zlib'] + + +logger = logging.getLogger('distutils2') +logger2to3 = logging.getLogger('RefactoringTool') + + +class _TestHandler(logging.handlers.BufferingHandler): + # stolen and adapted from test.support + + def __init__(self): + 
logging.handlers.BufferingHandler.__init__(self, 0) + self.setLevel(logging.DEBUG) + + def shouldFlush(self): + return False + + def emit(self, record): + self.buffer.append(record) class LoggingCatcher(object): - """TestCase-compatible mixin to catch logging calls. + """TestCase-compatible mixin to receive logging calls. - Every log message that goes through distutils2.log will get appended to - self.logs instead of being printed. You can check that your code logs - warnings and errors as documented by inspecting that list; helper methods - get_logs and clear_logs are also provided. + Upon setUp, instances of this classes get a BufferingHandler that's + configured to record all messages logged to the 'distutils2' logger. + + Use get_logs to retrieve messages and self.loghandler.flush to discard + them. get_logs automatically flushes the logs; if you test code that + generates logging messages but don't use get_logs, you have to flush + manually before doing other checks on logging message, otherwise you + will get irrelevant results. See example in test_command_check. """ def setUp(self): super(LoggingCatcher, self).setUp() - # TODO read the new logging docs and/or the python-dev posts about - # logging and tests to properly use a handler instead of - # monkey-patching - self.old_log = logger._log - logger._log = self._log - logger.setLevel(logging.INFO) - self.logs = [] - - def _log(self, *args, **kw): - self.logs.append(args) + self.loghandler = handler = _TestHandler() + self._old_levels = logger.level, logger2to3.level + logger.addHandler(handler) + logger.setLevel(logging.DEBUG) # we want all messages + logger2to3.setLevel(logging.CRITICAL) # we don't want 2to3 messages def tearDown(self): - logger._log = self.old_log + handler = self.loghandler + # All this is necessary to properly shut down the logging system and + # avoid a regrtest complaint. Thanks to Vinay Sajip for the help. 
+ handler.close() + logger.removeHandler(handler) + for ref in weakref.getweakrefs(handler): + logging._removeHandlerRef(ref) + del self.loghandler + logger.setLevel(self._old_levels[0]) + logger2to3.setLevel(self._old_levels[1]) super(LoggingCatcher, self).tearDown() def get_logs(self, *levels): - """Return a list of caught messages with level in `levels`. + """Return all log messages with level in *levels*. - Example: self.get_logs(log.WARN, log.DEBUG) -> list + Without explicit levels given, returns all messages. *levels* defaults + to all levels. For log calls with arguments (i.e. + logger.info('bla bla %r', arg)), the messages will be formatted before + being returned (e.g. "bla bla 'thing'"). + + Returns a list. Automatically flushes the loghandler after being + called. + + Example: self.get_logs(logging.WARN, logging.DEBUG). """ - def _format(msg, args): - if len(args) == 0: - return msg - return msg % args - return [_format(msg, args) for level, msg, args - in self.logs if level in levels] - - def clear_logs(self): - """Empty the internal list of caught messages.""" - del self.logs[:] - - -class WarningsCatcher(object): - - def setUp(self): - self._orig_showwarning = warnings.showwarning - warnings.showwarning = self._record_showwarning - self.warnings = [] - - def _record_showwarning(self, message, category, filename, lineno, - file=None, line=None): - self.warnings.append({"message": message, "category": category, - "filename": filename, "lineno": lineno, - "file": file, "line": line}) - - def tearDown(self): - warnings.showwarning = self._orig_showwarning + if not levels: + messages = [log.getMessage() for log in self.loghandler.buffer] + else: + messages = [log.getMessage() for log in self.loghandler.buffer + if log.levelno in levels] + self.loghandler.flush() + return messages class TempdirManager(object): @@ -109,24 +137,33 @@ def setUp(self): super(TempdirManager, self).setUp() + self._olddir = os.getcwd() self._basetempdir = tempfile.mkdtemp() + 
self._files = [] def tearDown(self): + for handle, name in self._files: + handle.close() + unlink(name) + + os.chdir(self._olddir) + shutil.rmtree(self._basetempdir) super(TempdirManager, self).tearDown() - shutil.rmtree(self._basetempdir, os.name in ('nt', 'cygwin')) def mktempfile(self): """Create a read-write temporary file and return it.""" fd, fn = tempfile.mkstemp(dir=self._basetempdir) os.close(fd) - return open(fn, 'w+') + fp = open(fn, 'w+') + self._files.append((fp, fn)) + return fp def mkdtemp(self): """Create a temporary directory and return its path.""" d = tempfile.mkdtemp(dir=self._basetempdir) return d - def write_file(self, path, content='xxx'): + def write_file(self, path, content='xxx', encoding=None): """Write a file at the given path. path can be a string, a tuple or a list; if it's a tuple or list, @@ -134,11 +171,8 @@ """ if isinstance(path, (list, tuple)): path = os.path.join(*path) - f = open(path, 'w') - try: + with codecs.open(path, 'w', encoding=encoding) as f: f.write(content) - finally: - f.close() def create_dist(self, **kw): """Create a stub distribution object and files. @@ -151,9 +185,6 @@ You can use self.write_file to write any file in that directory, e.g. setup scripts or Python modules. """ - # Late import so that third parties can import support without - # loading a ton of distutils2 modules in memory. - from distutils2.dist import Distribution if 'name' not in kw: kw['name'] = 'foo' tmp_dir = self.mkdtemp() @@ -176,25 +207,34 @@ def assertIsNotFile(self, *args): path = os.path.join(*args) - assert not os.path.isfile(path), "%s exist" % path + self.assertFalse(os.path.isfile(path), "%r exists" % path) -class EnvironGuard(object): - """TestCase-compatible mixin to save and restore the environment.""" + +class EnvironRestorer(object): + """TestCase-compatible mixin to restore or delete environment variables. 
+ + The variables to restore (or delete if they were not originally present) + must be explicitly listed in self.restore_environ. It's better to be + aware of what we're modifying instead of saving and restoring the whole + environment. + """ def setUp(self): - super(EnvironGuard, self).setUp() - self.old_environ = deepcopy(os.environ) + super(EnvironRestorer, self).setUp() + self._saved = [] + self._added = [] + for key in self.restore_environ: + if key in os.environ: + self._saved.append((key, os.environ[key])) + else: + self._added.append(key) def tearDown(self): - for key, value in self.old_environ.iteritems(): - if os.environ.get(key) != value: - os.environ[key] = value - - for key in os.environ.keys(): - if key not in self.old_environ: - del os.environ[key] - - super(EnvironGuard, self).tearDown() + for key, value in self._saved: + os.environ[key] = value + for key in self._added: + os.environ.pop(key, None) + super(EnvironRestorer, self).tearDown() class DummyCommand(object): @@ -205,7 +245,7 @@ """ def __init__(self, **kwargs): - for kw, val in kwargs.iteritems(): + for kw, val in kwargs.items(): setattr(self, kw, val) def ensure_finalized(self): @@ -234,8 +274,231 @@ return d -try: - from test.test_support import skip_unless_symlink -except ImportError: - skip_unless_symlink = unittest.skip( - 'requires test.test_support.skip_unless_symlink') +def fake_dec(*args, **kw): + """Fake decorator""" + def _wrap(func): + def __wrap(*args, **kw): + return func(*args, **kw) + return __wrap + return _wrap + + +#try: +# from test.support import skip_unless_symlink +#except ImportError: +# skip_unless_symlink = unittest.skip( +# 'requires test.support.skip_unless_symlink') + +if os.name == 'java': + # Jython disallows @ in module names + TESTFN = '$test' +else: + TESTFN = '@test' + +# Disambiguate TESTFN for parallel testing, while letting it remain a valid +# module name. 
+TESTFN = "{0}_{1}_tmp".format(TESTFN, os.getpid()) + + +# TESTFN_UNICODE is a non-ascii filename +TESTFN_UNICODE = TESTFN + "-\xe0\xf2\u0258\u0141\u011f" +if sys.platform == 'darwin': + # In Mac OS X's VFS API file names are, by definition, canonically + # decomposed Unicode, encoded using UTF-8. See QA1173: + # http://developer.apple.com/mac/library/qa/qa2001/qa1173.html + import unicodedata + TESTFN_UNICODE = unicodedata.normalize('NFD', TESTFN_UNICODE) +TESTFN_ENCODING = sys.getfilesystemencoding() + +# TESTFN_UNENCODABLE is a filename (str type) that should *not* be able to be +# encoded by the filesystem encoding (in strict mode). It can be None if we +# cannot generate such filename. +TESTFN_UNENCODABLE = None +if os.name in ('nt', 'ce'): + # skip win32s (0) or Windows 9x/ME (1) + if sys.getwindowsversion().platform >= 2: + # Different kinds of characters from various languages to minimize the + # probability that the whole name is encodable to MBCS (issue #9819) + TESTFN_UNENCODABLE = TESTFN + "-\u5171\u0141\u2661\u0363\uDC80" + try: + TESTFN_UNENCODABLE.encode(TESTFN_ENCODING) + except UnicodeEncodeError: + pass + else: + print('WARNING: The filename %r CAN be encoded by the filesystem encoding (%s). ' + 'Unicode filename tests may not be effective' + % (TESTFN_UNENCODABLE, TESTFN_ENCODING)) + TESTFN_UNENCODABLE = None +# Mac OS X denies unencodable filenames (invalid utf-8) +elif sys.platform != 'darwin': + try: + # ascii and utf-8 cannot encode the byte 0xff + b'\xff'.decode(TESTFN_ENCODING) + except UnicodeDecodeError: + # 0xff will be encoded using the surrogate character u+DCFF + try: + TESTFN_UNENCODABLE = TESTFN \ + + b'-\xff'.decode(TESTFN_ENCODING, 'surrogateescape') + except LookupError: + pass + else: + # File system encoding (eg. ISO-8859-* encodings) can encode + # the byte 0xff. Skip some unicode filename tests. 
+ pass + +def unlink(filename): + try: + os.unlink(filename) + except OSError: + error = sys.exc_info()[1] + # The filename need not exist. + if error.errno not in (errno.ENOENT, errno.ENOTDIR): + raise + +def _filter_suite(suite, pred): + """Recursively filter test cases in a suite based on a predicate.""" + newtests = [] + for test in suite._tests: + if isinstance(test, unittest.TestSuite): + _filter_suite(test, pred) + newtests.append(test) + else: + if pred(test): + newtests.append(test) + suite._tests = newtests + +class Error(Exception): + """Base class for regression test exceptions.""" + +class TestFailed(Error): + """Test failed.""" + + +verbose = True +failfast = False + +def _run_suite(suite): + """Run tests from a unittest.TestSuite-derived class.""" + if verbose: + runner = unittest.TextTestRunner(sys.stdout, verbosity=2, + failfast=failfast) + else: + runner = BasicTestRunner() + + result = runner.run(suite) + if not result.wasSuccessful(): + if len(result.errors) == 1 and not result.failures: + err = result.errors[0][1] + elif len(result.failures) == 1 and not result.errors: + err = result.failures[0][1] + else: + err = "multiple errors occurred" + if not verbose: err += "; run in verbose mode for details" + raise TestFailed(err) + +match_tests = None + +def run_unittest(*classes): + """Run tests from unittest.TestCase-derived classes.""" + valid_types = (unittest.TestSuite, unittest.TestCase) + suite = unittest.TestSuite() + for cls in classes: + if isinstance(cls, basestring): + if cls in sys.modules: + suite.addTest(unittest.findTestCases(sys.modules[cls])) + else: + raise ValueError("str arguments must be keys in sys.modules") + elif isinstance(cls, valid_types): + suite.addTest(cls) + else: + suite.addTest(unittest.makeSuite(cls)) + def case_pred(test): + if match_tests is None: + return True + for name in test.id().split("."): + if fnmatch.fnmatchcase(name, match_tests): + return True + return False + _filter_suite(suite, case_pred) + 
_run_suite(suite) + + +def reap_threads(func): + """Use this function when threads are being used. This will + ensure that the threads are cleaned up even when the test fails. + If threading is unavailable this function does nothing. + """ + if not _thread: + return func + + @functools.wraps(func) + def decorator(*args): + key = threading_setup() + try: + return func(*args) + finally: + threading_cleanup(*key) + return decorator + +def reap_children(): + """Use this function at the end of test_main() whenever sub-processes + are started. This will help ensure that no extra children (zombies) + stick around to hog resources and create problems when looking + for refleaks. + """ + + # Reap all our dead child processes so we don't leave zombies around. + # These hog resources and might be causing some of the buildbots to die. + if hasattr(os, 'waitpid'): + any_process = -1 + while True: + try: + # This will raise an exception on Windows. That's ok. + pid, status = os.waitpid(any_process, os.WNOHANG) + if pid == 0: + break + except: + break + +requires_zlib = unittest.skipUnless(zlib, 'requires zlib') + +# Executing the interpreter in a subprocess +def _assert_python(expected_success, *args, **env_vars): + cmd_line = [sys.executable] + if not env_vars: + cmd_line.append('-E') + cmd_line.extend(args) + # Need to preserve the original environment, for in-place testing of + # shared library builds. 
+ env = os.environ.copy() + env.update(env_vars) + p = subprocess.Popen(cmd_line, stdin=subprocess.PIPE, + stdout=subprocess.PIPE, stderr=subprocess.PIPE, + env=env) + try: + out, err = p.communicate() + finally: + subprocess._cleanup() + p.stdout.close() + p.stderr.close() + rc = p.returncode + err = strip_python_stderr(err) + if (rc and expected_success) or (not rc and not expected_success): + raise AssertionError( + "Process return code is %d, " + "stderr follows:\n%s" % (rc, err.decode('ascii', 'ignore'))) + return rc, out, err + +def assert_python_ok(*args, **env_vars): + """ + Assert that running the interpreter with `args` and optional environment + variables `env_vars` is ok and return a (return code, stdout, stderr) tuple. + """ + return _assert_python(True, *args, **env_vars) + +def unload(name): + try: + del sys.modules[name] + except KeyError: + pass + diff --git a/distutils2/tests/test_ccompiler.py b/distutils2/tests/test_ccompiler.py --- a/distutils2/tests/test_ccompiler.py +++ b/distutils2/tests/test_ccompiler.py @@ -1,10 +1,10 @@ -"""Tests for distutils.ccompiler.""" -from distutils2.compiler.ccompiler import CCompiler +"""Tests for distutils.compiler.ccompiler.""" + +from distutils2.compiler import ccompiler from distutils2.tests import unittest, support -class CCompilerTestCase(support.EnvironGuard, unittest.TestCase): - +class CCompilerTestCase(unittest.TestCase): pass # XXX need some tests on CCompiler diff --git a/distutils2/tests/test_command_bdist.py b/distutils2/tests/test_command_bdist.py --- a/distutils2/tests/test_command_bdist.py +++ b/distutils2/tests/test_command_bdist.py @@ -1,13 +1,13 @@ """Tests for distutils.command.bdist.""" from distutils2 import util -from distutils2.tests import run_unittest +from distutils2.command.bdist import bdist, show_formats -from distutils2.command.bdist import bdist, show_formats from distutils2.tests import unittest, support, captured_stdout class BuildTestCase(support.TempdirManager, + 
support.LoggingCatcher, unittest.TestCase): def _mock_get_platform(self): @@ -74,4 +74,4 @@ return unittest.makeSuite(BuildTestCase) if __name__ == '__main__': - run_unittest(test_suite()) + unittest.main(defaultTest='test_suite') diff --git a/distutils2/tests/test_command_bdist_dumb.py b/distutils2/tests/test_command_bdist_dumb.py --- a/distutils2/tests/test_command_bdist_dumb.py +++ b/distutils2/tests/test_command_bdist_dumb.py @@ -1,54 +1,34 @@ """Tests for distutils.command.bdist_dumb.""" -import sys import os - -# zlib is not used here, but if it's not available -# test_simple_built will fail -try: - import zlib -except ImportError: - zlib = None - -from distutils2.tests import run_unittest, unittest +import distutils2.util from distutils2.dist import Distribution from distutils2.command.bdist_dumb import bdist_dumb -from distutils2.tests import support +from distutils2.tests import unittest, support +from distutils2.tests.support import requires_zlib -SETUP_PY = """\ -from distutils.run import setup -import foo - -setup(name='foo', version='0.1', py_modules=['foo'], - url='xxx', author='xxx', author_email='xxx') - -""" class BuildDumbTestCase(support.TempdirManager, support.LoggingCatcher, - support.EnvironGuard, unittest.TestCase): def setUp(self): super(BuildDumbTestCase, self).setUp() self.old_location = os.getcwd() - self.old_sys_argv = sys.argv, sys.argv[:] def tearDown(self): os.chdir(self.old_location) - sys.argv = self.old_sys_argv[0] - sys.argv[:] = self.old_sys_argv[1] + distutils2.util._path_created.clear() super(BuildDumbTestCase, self).tearDown() - @unittest.skipUnless(zlib, "requires zlib") + @requires_zlib def test_simple_built(self): # let's create a simple package tmp_dir = self.mkdtemp() pkg_dir = os.path.join(tmp_dir, 'foo') os.mkdir(pkg_dir) - self.write_file((pkg_dir, 'setup.py'), SETUP_PY) self.write_file((pkg_dir, 'foo.py'), '#') self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py') self.write_file((pkg_dir, 'README'), '') @@ 
-57,10 +37,7 @@ 'py_modules': ['foo'], 'url': 'xxx', 'author': 'xxx', 'author_email': 'xxx'}) - dist.script_name = 'setup.py' os.chdir(pkg_dir) - - sys.argv = ['setup.py'] cmd = bdist_dumb(dist) # so the output is the same no matter @@ -97,8 +74,9 @@ default = cmd.default_format[os.name] self.assertEqual(cmd.format, default) + def test_suite(): return unittest.makeSuite(BuildDumbTestCase) if __name__ == '__main__': - run_unittest(test_suite()) + unittest.main(defaultTest='test_suite') diff --git a/distutils2/tests/test_command_bdist_msi.py b/distutils2/tests/test_command_bdist_msi.py --- a/distutils2/tests/test_command_bdist_msi.py +++ b/distutils2/tests/test_command_bdist_msi.py @@ -1,9 +1,8 @@ """Tests for distutils.command.bdist_msi.""" import sys -from distutils2.tests import run_unittest +from distutils2.tests import unittest, support -from distutils2.tests import unittest, support class BDistMSITestCase(support.TempdirManager, support.LoggingCatcher, @@ -17,8 +16,10 @@ cmd = bdist_msi(dist) cmd.ensure_finalized() + def test_suite(): return unittest.makeSuite(BDistMSITestCase) + if __name__ == '__main__': - run_unittest(test_suite()) + unittest.main(defaultTest='test_suite') diff --git a/distutils2/tests/test_command_bdist_wininst.py b/distutils2/tests/test_command_bdist_wininst.py --- a/distutils2/tests/test_command_bdist_wininst.py +++ b/distutils2/tests/test_command_bdist_wininst.py @@ -1,7 +1,8 @@ """Tests for distutils.command.bdist_wininst.""" -from distutils2.tests import unittest, support, run_unittest from distutils2.command.bdist_wininst import bdist_wininst +from distutils2.tests import unittest, support + class BuildWinInstTestCase(support.TempdirManager, support.LoggingCatcher, @@ -20,10 +21,12 @@ # and make sure it finds it and returns its content # no matter what platform we have exe_file = cmd.get_exe_bytes() - self.assertTrue(len(exe_file) > 10) + self.assertGreater(len(exe_file), 10) + def test_suite(): return 
unittest.makeSuite(BuildWinInstTestCase) + if __name__ == '__main__': - run_unittest(test_suite()) + unittest.main(defaultTest='test_suite') diff --git a/distutils2/tests/test_command_build.py b/distutils2/tests/test_command_build.py --- a/distutils2/tests/test_command_build.py +++ b/distutils2/tests/test_command_build.py @@ -3,8 +3,9 @@ import sys from distutils2.command.build import build +from sysconfig import get_platform from distutils2.tests import unittest, support -from distutils2._backport.sysconfig import get_platform + class BuildTestCase(support.TempdirManager, support.LoggingCatcher, @@ -40,12 +41,13 @@ self.assertEqual(cmd.build_temp, wanted) # build_scripts is build/scripts-x.x - wanted = os.path.join(cmd.build_base, 'scripts-' + sys.version[0:3]) + wanted = os.path.join(cmd.build_base, 'scripts-' + sys.version[0:3]) self.assertEqual(cmd.build_scripts, wanted) # executable is os.path.normpath(sys.executable) self.assertEqual(cmd.executable, os.path.normpath(sys.executable)) + def test_suite(): return unittest.makeSuite(BuildTestCase) diff --git a/distutils2/tests/test_command_build_clib.py b/distutils2/tests/test_command_build_clib.py --- a/distutils2/tests/test_command_build_clib.py +++ b/distutils2/tests/test_command_build_clib.py @@ -2,10 +2,11 @@ import os import sys +from distutils2.util import find_executable from distutils2.command.build_clib import build_clib -from distutils2.errors import DistutilsSetupError +from distutils2.errors import PackagingSetupError from distutils2.tests import unittest, support -from distutils2.util import find_executable + class BuildCLibTestCase(support.TempdirManager, support.LoggingCatcher, @@ -16,24 +17,24 @@ cmd = build_clib(dist) # 'libraries' option must be a list - self.assertRaises(DistutilsSetupError, cmd.check_library_list, 'foo') + self.assertRaises(PackagingSetupError, cmd.check_library_list, 'foo') # each element of 'libraries' must a 2-tuple - self.assertRaises(DistutilsSetupError, 
cmd.check_library_list, + self.assertRaises(PackagingSetupError, cmd.check_library_list, ['foo1', 'foo2']) # first element of each tuple in 'libraries' # must be a string (the library name) - self.assertRaises(DistutilsSetupError, cmd.check_library_list, + self.assertRaises(PackagingSetupError, cmd.check_library_list, [(1, 'foo1'), ('name', 'foo2')]) # library name may not contain directory separators - self.assertRaises(DistutilsSetupError, cmd.check_library_list, + self.assertRaises(PackagingSetupError, cmd.check_library_list, [('name', 'foo1'), ('another/name', 'foo2')]) # second element of each tuple must be a dictionary (build info) - self.assertRaises(DistutilsSetupError, cmd.check_library_list, + self.assertRaises(PackagingSetupError, cmd.check_library_list, [('name', {}), ('another', 'foo2')]) @@ -48,10 +49,10 @@ # "in 'libraries' option 'sources' must be present and must be # a list of source filenames cmd.libraries = [('name', {})] - self.assertRaises(DistutilsSetupError, cmd.get_source_files) + self.assertRaises(PackagingSetupError, cmd.get_source_files) cmd.libraries = [('name', {'sources': 1})] - self.assertRaises(DistutilsSetupError, cmd.get_source_files) + self.assertRaises(PackagingSetupError, cmd.get_source_files) cmd.libraries = [('name', {'sources': ['a', 'b']})] self.assertEqual(cmd.get_source_files(), ['a', 'b']) @@ -64,24 +65,24 @@ self.assertEqual(cmd.get_source_files(), ['a', 'b', 'c', 'd']) def test_build_libraries(self): - pkg_dir, dist = self.create_dist() cmd = build_clib(dist) - class FakeCompiler: + + class FakeCompiler(object): def compile(*args, **kw): pass create_static_lib = compile cmd.compiler = FakeCompiler() - # build_libraries is also doing a bit of typoe checking + # build_libraries is also doing a bit of type checking lib = [('name', {'sources': 'notvalid'})] - self.assertRaises(DistutilsSetupError, cmd.build_libraries, lib) + self.assertRaises(PackagingSetupError, cmd.build_libraries, lib) - lib = [('name', {'sources': 
list()})] + lib = [('name', {'sources': []})] cmd.build_libraries(lib) - lib = [('name', {'sources': tuple()})] + lib = [('name', {'sources': ()})] cmd.build_libraries(lib) def test_finalize_options(self): @@ -97,13 +98,10 @@ self.assertEqual(cmd.include_dirs, []) cmd.distribution.libraries = 'WONTWORK' - self.assertRaises(DistutilsSetupError, cmd.finalize_options) + self.assertRaises(PackagingSetupError, cmd.finalize_options) + @unittest.skipIf(sys.platform == 'win32', 'disabled on win32') def test_run(self): - # can't test on windows - if sys.platform == 'win32': - return - pkg_dir, dist = self.create_dist() cmd = build_clib(dist) @@ -127,13 +125,14 @@ if ccmd is None: continue if find_executable(ccmd[0]) is None: - return # can't test + raise unittest.SkipTest("can't test") # this should work cmd.run() # let's check the result - self.assertTrue('libfoo.a' in os.listdir(build_temp)) + self.assertIn('libfoo.a', os.listdir(build_temp)) + def test_suite(): return unittest.makeSuite(BuildCLibTestCase) diff --git a/distutils2/tests/test_command_build_ext.py b/distutils2/tests/test_command_build_ext.py --- a/distutils2/tests/test_command_build_ext.py +++ b/distutils2/tests/test_command_build_ext.py @@ -1,26 +1,29 @@ +import os import sys -import os +import site import shutil +import sysconfig +import textwrap from StringIO import StringIO +from distutils2.dist import Distribution +from distutils2.errors import (UnknownFileError, CompileError, + PackagingPlatformError) +from distutils2.command.build_ext import build_ext +from distutils2.compiler.extension import Extension +from .support import assert_python_ok -import distutils2.tests -from distutils2.tests import unittest -from distutils2.compiler.extension import Extension -from distutils2.dist import Distribution -from distutils2.command.build_ext import build_ext -from distutils2.tests import support -from distutils2.errors import (UnknownFileError, DistutilsSetupError, - CompileError) -from distutils2._backport 
import sysconfig +from distutils2.tests import support, unittest, verbose, unload -# http://bugs.python.org/issue4373 -# Don't load the xx module more than once. -ALREADY_TESTED = False -CURDIR = os.path.abspath(os.path.dirname(__file__)) - def _get_source_filename(): - return os.path.join(CURDIR, 'xxmodule.c') + # use installed copy if available + tests_f = os.path.join(os.path.dirname(__file__), 'xxmodule.c') + if os.path.exists(tests_f): + return tests_f + # otherwise try using copy from build directory + srcdir = sysconfig.get_config_var('srcdir') + return os.path.join(srcdir, 'Modules', 'xxmodule.c') + class BuildExtTestCase(support.TempdirManager, support.LoggingCatcher, @@ -30,25 +33,51 @@ # Note that we're making changes to sys.path super(BuildExtTestCase, self).setUp() self.tmp_dir = self.mkdtemp() - self.sys_path = sys.path, sys.path[:] - sys.path.append(self.tmp_dir) - shutil.copy(_get_source_filename(), self.tmp_dir) + filename = _get_source_filename() + if os.path.exists(filename): + shutil.copy(filename, self.tmp_dir) + self.old_user_base = site.USER_BASE + site.USER_BASE = self.mkdtemp() + build_ext.USER_BASE = site.USER_BASE + + def tearDown(self): + # Get everything back to normal if sys.version > "2.6": - import site - self.old_user_base = site.USER_BASE - site.USER_BASE = self.mkdtemp() - from distutils2.command import build_ext - build_ext.USER_BASE = site.USER_BASE + site.USER_BASE = self.old_user_base + build_ext.USER_BASE = self.old_user_base - # XXX only works with 2.6 > -- dunno why yet - @unittest.skipIf(sys.version < '2.6', 'requires Python 2.6 or higher') + super(BuildExtTestCase, self).tearDown() + + def _fixup_command(self, cmd): + # When Python was build with --enable-shared, -L. is not good enough + # to find the libpython.so. This is because regrtest runs it + # under a tempdir, not in the top level where the .so lives. By the + # time we've gotten here, Python's already been chdir'd to the + # tempdir. 
+ # + # To further add to the fun, we can't just add library_dirs to the + # Extension() instance because that doesn't get plumbed through to the + # final compiler command. + if (sysconfig.get_config_var('Py_ENABLE_SHARED') and + not sys.platform.startswith('win')): + runshared = sysconfig.get_config_var('RUNSHARED') + if runshared is None: + cmd.library_dirs = ['.'] + else: + name, equals, value = runshared.partition('=') + cmd.library_dirs = value.split(os.pathsep) + def test_build_ext(self): - global ALREADY_TESTED xx_c = os.path.join(self.tmp_dir, 'xxmodule.c') + if not os.path.exists(xx_c): + # skipping if we cannot find it + return xx_ext = Extension('xx', [xx_c]) dist = Distribution({'name': 'xx', 'ext_modules': [xx_ext]}) dist.package_dir = self.tmp_dir cmd = build_ext(dist) + self._fixup_command(cmd) + if os.name == "nt": # On Windows, we must build a debug version iff running # a debug build of Python @@ -57,7 +86,7 @@ cmd.build_temp = self.tmp_dir old_stdout = sys.stdout - if not distutils2.tests.verbose: + if not verbose: # silence compiler output sys.stdout = StringIO() try: @@ -66,44 +95,32 @@ finally: sys.stdout = old_stdout - if ALREADY_TESTED: - return - else: - ALREADY_TESTED = True + code = """if 1: + import sys + sys.path.insert(0, %r) - import xx + import xx - for attr in ('error', 'foo', 'new', 'roj'): - self.assertTrue(hasattr(xx, attr)) + for attr in ('error', 'foo', 'new', 'roj'): + assert hasattr(xx, attr) - self.assertEqual(xx.foo(2, 5), 7) - self.assertEqual(xx.foo(13,15), 28) - self.assertEqual(xx.new().demo(), None) - doc = 'This is a template module just for instruction.' 
- self.assertEqual(xx.__doc__, doc) - self.assertTrue(isinstance(xx.Null(), xx.Null)) - self.assertTrue(isinstance(xx.Str(), xx.Str)) - - def tearDown(self): - # Get everything back to normal - distutils2.tests.unload('xx') - sys.path = self.sys_path[0] - sys.path[:] = self.sys_path[1] - if sys.version > "2.6": - import site - site.USER_BASE = self.old_user_base - from distutils2.command import build_ext - build_ext.USER_BASE = self.old_user_base - - super(BuildExtTestCase, self).tearDown() + assert xx.foo(2, 5) == 7 + assert xx.foo(13, 15) == 28 + assert xx.new().demo() is None + doc = 'This is a template module just for instruction.' + assert xx.__doc__ == doc + assert isinstance(xx.Null(), xx.Null) + assert isinstance(xx.Str(), xx.Str)""" + code = code % self.tmp_dir + assert_python_ok('-c', code) def test_solaris_enable_shared(self): dist = Distribution({'name': 'xx'}) cmd = build_ext(dist) old = sys.platform - sys.platform = 'sunos' # fooling finalize_options - from distutils2._backport.sysconfig import _CONFIG_VARS + sys.platform = 'sunos' # fooling finalize_options + from sysconfig import _CONFIG_VARS old_var = _CONFIG_VARS.get('Py_ENABLE_SHARED') _CONFIG_VARS['Py_ENABLE_SHARED'] = 1 @@ -117,21 +134,20 @@ _CONFIG_VARS['Py_ENABLE_SHARED'] = old_var # make sure we get some library dirs under solaris - self.assertTrue(len(cmd.library_dirs) > 0) + self.assertGreater(len(cmd.library_dirs), 0) @unittest.skipIf(sys.version < '2.6', 'requires Python 2.6 or higher') def test_user_site(self): - import site dist = Distribution({'name': 'xx'}) cmd = build_ext(dist) # making sure the user option is there - options = [name for name, short, lable in + options = [name for name, short, label in cmd.user_options] - self.assertTrue('user' in options) + self.assertIn('user', options) # setting a value - cmd.user = 1 + cmd.user = True # setting user based lib and include lib = os.path.join(site.USER_BASE, 'lib') @@ -144,9 +160,9 @@ # see if include_dirs and library_dirs # were 
set - self.assertTrue(lib in cmd.library_dirs) - self.assertTrue(lib in cmd.rpath) - self.assertTrue(incl in cmd.include_dirs) + self.assertIn(lib, cmd.library_dirs) + self.assertIn(lib, cmd.rpath) + self.assertIn(incl, cmd.include_dirs) def test_optional_extension(self): @@ -174,10 +190,10 @@ cmd.finalize_options() py_include = sysconfig.get_path('include') - self.assertTrue(py_include in cmd.include_dirs) + self.assertIn(py_include, cmd.include_dirs) plat_py_include = sysconfig.get_path('platinclude') - self.assertTrue(plat_py_include in cmd.include_dirs) + self.assertIn(plat_py_include, cmd.include_dirs) # make sure cmd.libraries is turned into a list # if it's a string @@ -191,7 +207,7 @@ cmd = build_ext(dist) cmd.library_dirs = 'my_lib_dir' cmd.finalize_options() - self.assertTrue('my_lib_dir' in cmd.library_dirs) + self.assertIn('my_lib_dir', cmd.library_dirs) # make sure rpath is turned into a list # if it's a list of os.pathsep's paths @@ -248,11 +264,12 @@ def test_get_outputs(self): tmp_dir = self.mkdtemp() c_file = os.path.join(tmp_dir, 'foo.c') - self.write_file(c_file, 'void initfoo(void) {};\n') + self.write_file(c_file, 'void PyInit_foo(void) {}\n') ext = Extension('foo', [c_file], optional=False) dist = Distribution({'name': 'xx', 'ext_modules': [ext]}) cmd = build_ext(dist) + self._fixup_command(cmd) cmd.ensure_finalized() self.assertEqual(len(cmd.get_outputs()), 1) @@ -268,7 +285,7 @@ old_wd = os.getcwd() os.chdir(other_tmp_dir) try: - cmd.inplace = 1 + cmd.inplace = True cmd.run() so_file = cmd.get_outputs()[0] finally: @@ -279,7 +296,7 @@ so_dir = os.path.dirname(so_file) self.assertEqual(so_dir, other_tmp_dir) - cmd.inplace = 0 + cmd.inplace = False cmd.run() so_file = cmd.get_outputs()[0] self.assertTrue(os.path.exists(so_file)) @@ -287,7 +304,7 @@ so_dir = os.path.dirname(so_file) self.assertEqual(so_dir, cmd.build_lib) - # inplace = 0, cmd.package = 'bar' + # inplace = False, cmd.package = 'bar' build_py = 
cmd.get_finalized_command('build_py') build_py.package_dir = 'bar' path = cmd.get_ext_fullpath('foo') @@ -295,8 +312,8 @@ path = os.path.split(path)[0] self.assertEqual(path, cmd.build_lib) - # inplace = 1, cmd.package = 'bar' - cmd.inplace = 1 + # inplace = True, cmd.package = 'bar' + cmd.inplace = True other_tmp_dir = os.path.realpath(self.mkdtemp()) old_wd = os.getcwd() os.chdir(other_tmp_dir) @@ -317,7 +334,7 @@ #dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]}) dist = Distribution() cmd = build_ext(dist) - cmd.inplace = 1 + cmd.inplace = True cmd.distribution.package_dir = 'src' cmd.distribution.packages = ['lxml', 'lxml.html'] curdir = os.getcwd() @@ -326,7 +343,7 @@ self.assertEqual(wanted, path) # building lxml.etree not inplace - cmd.inplace = 0 + cmd.inplace = False cmd.build_lib = os.path.join(curdir, 'tmpdir') wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext) path = cmd.get_ext_fullpath('lxml.etree') @@ -342,19 +359,110 @@ self.assertEqual(wanted, path) # building twisted.runner.portmap inplace - cmd.inplace = 1 + cmd.inplace = True path = cmd.get_ext_fullpath('twisted.runner.portmap') wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext) self.assertEqual(wanted, path) + @unittest.skipUnless(sys.platform == 'darwin', + 'test only relevant for Mac OS X') + def test_deployment_target_default(self): + # Issue 9516: Test that, in the absence of the environment variable, + # an extension module is compiled with the same deployment target as + # the interpreter. + self._try_compile_deployment_target('==', None) + + @unittest.skipUnless(sys.platform == 'darwin', + 'test only relevant for Mac OS X') + def test_deployment_target_too_low(self): + # Issue 9516: Test that an extension module is not allowed to be + # compiled with a deployment target less than that of the interpreter. 
+ self.assertRaises(PackagingPlatformError, + self._try_compile_deployment_target, '>', '10.1') + + @unittest.skipUnless(sys.platform == 'darwin', + 'test only relevant for Mac OS X') + def test_deployment_target_higher_ok(self): + # Issue 9516: Test that an extension module can be compiled with a + # deployment target higher than that of the interpreter: the ext + # module may depend on some newer OS feature. + deptarget = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') + if deptarget: + # increment the minor version number (i.e. 10.6 -> 10.7) + deptarget = [int(x) for x in deptarget.split('.')] + deptarget[-1] += 1 + deptarget = '.'.join(str(i) for i in deptarget) + self._try_compile_deployment_target('<', deptarget) + + def _try_compile_deployment_target(self, operator, target): + orig_environ = os.environ + os.environ = orig_environ.copy() + self.addCleanup(setattr, os, 'environ', orig_environ) + + if target is None: + if os.environ.get('MACOSX_DEPLOYMENT_TARGET'): + del os.environ['MACOSX_DEPLOYMENT_TARGET'] + else: + os.environ['MACOSX_DEPLOYMENT_TARGET'] = target + + deptarget_c = os.path.join(self.tmp_dir, 'deptargetmodule.c') + + with open(deptarget_c, 'w') as fp: + fp.write(textwrap.dedent('''\ + #include + + int dummy; + + #if TARGET %s MAC_OS_X_VERSION_MIN_REQUIRED + #else + #error "Unexpected target" + #endif + + ''' % operator)) + + # get the deployment target that the interpreter was built with + target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') + target = tuple(map(int, target.split('.'))) + target = '%02d%01d0' % target + + deptarget_ext = Extension( + 'deptarget', + [deptarget_c], + extra_compile_args=['-DTARGET=%s' % (target,)], + ) + dist = Distribution({ + 'name': 'deptarget', + 'ext_modules': [deptarget_ext], + }) + dist.package_dir = self.tmp_dir + cmd = build_ext(dist) + cmd.build_lib = self.tmp_dir + cmd.build_temp = self.tmp_dir + + try: + old_stdout = sys.stdout + if not verbose: + # silence compiler output + sys.stdout = 
StringIO() + try: + cmd.ensure_finalized() + cmd.run() + finally: + sys.stdout = old_stdout + + except CompileError: + self.fail("Wrong deployment target during compilation") + + def test_suite(): src = _get_source_filename() if not os.path.exists(src): - if distutils2.tests.verbose: - print ('test_build_ext: Cannot find source code (test' - ' must run in python build dir)') + if verbose: + print('test_command_build_ext: Cannot find source code (test' + ' must run in python build dir)') return unittest.TestSuite() - else: return unittest.makeSuite(BuildExtTestCase) + else: + return unittest.makeSuite(BuildExtTestCase) if __name__ == '__main__': - distutils2.tests.run_unittest(test_suite()) + unittest.main(defaultTest='test_suite') diff --git a/distutils2/tests/test_command_build_py.py b/distutils2/tests/test_command_build_py.py --- a/distutils2/tests/test_command_build_py.py +++ b/distutils2/tests/test_command_build_py.py @@ -2,11 +2,10 @@ import os import sys -import StringIO from distutils2.command.build_py import build_py from distutils2.dist import Distribution -from distutils2.errors import DistutilsFileError +from distutils2.errors import PackagingFileError from distutils2.tests import unittest, support @@ -34,11 +33,9 @@ dist = Distribution({"packages": ["pkg"], "package_dir": sources}) - # script_name need not exist, it just need to be initialized - dist.script_name = os.path.join(sources, "setup.py") dist.command_obj["build"] = support.DummyCommand( - force=0, + force=False, build_lib=destination, use_2to3_fixers=None, convert_2to3_doctests=None, @@ -48,7 +45,7 @@ dist.package_dir = sources cmd = build_py(dist) - cmd.compile = 1 + cmd.compile = True cmd.ensure_finalized() self.assertEqual(cmd.package_data, dist.package_data) @@ -61,11 +58,15 @@ self.assertEqual(len(cmd.get_outputs()), 3) pkgdest = os.path.join(destination, "pkg") files = os.listdir(pkgdest) - self.assertTrue("__init__.py" in files) - self.assertTrue("__init__.pyc" in files) - 
self.assertTrue("README.txt" in files) + self.assertIn("__init__.py", files) + self.assertIn("README.txt", files) + # XXX even with -O, distutils writes pyc, not pyo; bug? + if sys.dont_write_bytecode: + self.assertNotIn("__init__.pyc", files) + else: + self.assertIn("__init__.pyc", files) - def test_empty_package_dir (self): + def test_empty_package_dir(self): # See SF 1668596/1720897. cwd = os.getcwd() @@ -73,27 +74,24 @@ sources = self.mkdtemp() pkg = os.path.join(sources, 'pkg') os.mkdir(pkg) - open(os.path.join(pkg, "__init__.py"), "w").close() + open(os.path.join(pkg, "__init__.py"), "wb").close() testdir = os.path.join(pkg, "doc") os.mkdir(testdir) - open(os.path.join(testdir, "testfile"), "w").close() + open(os.path.join(testdir, "testfile"), "wb").close() os.chdir(sources) old_stdout = sys.stdout - #sys.stdout = StringIO.StringIO() try: dist = Distribution({"packages": ["pkg"], "package_dir": sources, "package_data": {"pkg": ["doc/*"]}}) - # script_name need not exist, it just need to be initialized - dist.script_name = os.path.join(sources, "setup.py") dist.script_args = ["build"] dist.parse_command_line() try: dist.run_commands() - except DistutilsFileError, e: + except PackagingFileError: self.fail("failed package_data test when package_dir is ''") finally: # Restore state. 
@@ -106,7 +104,7 @@ # makes sure byte_compile is not used pkg_dir, dist = self.create_dist() cmd = build_py(dist) - cmd.compile = 1 + cmd.compile = True cmd.optimize = 1 old_dont_write_bytecode = sys.dont_write_bytecode @@ -116,7 +114,7 @@ finally: sys.dont_write_bytecode = old_dont_write_bytecode - self.assertIn('byte-compiling is disabled', self.logs[0][2][1]) + self.assertIn('byte-compiling is disabled', self.get_logs()[0]) def test_suite(): return unittest.makeSuite(BuildPyTestCase) diff --git a/distutils2/tests/test_command_build_scripts.py b/distutils2/tests/test_command_build_scripts.py --- a/distutils2/tests/test_command_build_scripts.py +++ b/distutils2/tests/test_command_build_scripts.py @@ -1,11 +1,10 @@ """Tests for distutils.command.build_scripts.""" import os - +import sys +import sysconfig +from distutils2.dist import Distribution from distutils2.command.build_scripts import build_scripts -from distutils2.dist import Distribution -from distutils2._backport import sysconfig - from distutils2.tests import unittest, support @@ -15,8 +14,8 @@ def test_default_settings(self): cmd = self.get_build_scripts_cmd("/foo/bar", []) - self.assertTrue(not cmd.force) - self.assertTrue(cmd.build_dir is None) + self.assertFalse(cmd.force) + self.assertIs(cmd.build_dir, None) cmd.finalize_options() @@ -36,15 +35,14 @@ built = os.listdir(target) for name in expected: - self.assertTrue(name in built) + self.assertIn(name, built) def get_build_scripts_cmd(self, target, scripts): - import sys dist = Distribution() dist.scripts = scripts dist.command_obj["build"] = support.DummyCommand( build_scripts=target, - force=1, + force=True, executable=sys.executable, use_2to3=False, use_2to3_fixers=None, @@ -72,11 +70,8 @@ return expected def write_script(self, dir, name, text): - f = open(os.path.join(dir, name), "w") - try: + with open(os.path.join(dir, name), "w") as f: f.write(text) - finally: - f.close() def test_version_int(self): source = self.mkdtemp() @@ -104,7 +99,7 @@ 
built = os.listdir(target) for name in expected: - self.assertTrue(name in built) + self.assertIn(name, built) def test_suite(): return unittest.makeSuite(BuildScriptsTestCase) diff --git a/distutils2/tests/test_command_check.py b/distutils2/tests/test_command_check.py --- a/distutils2/tests/test_command_check.py +++ b/distutils2/tests/test_command_check.py @@ -1,10 +1,10 @@ """Tests for distutils.command.check.""" +import logging from distutils2.command.check import check from distutils2.metadata import _HAS_DOCUTILS +from distutils2.errors import PackagingSetupError, MetadataMissingError from distutils2.tests import unittest, support -from distutils2.errors import DistutilsSetupError -from distutils2.errors import MetadataMissingError class CheckTestCase(support.LoggingCatcher, @@ -13,7 +13,7 @@ def _run(self, metadata=None, **options): if metadata is None: - metadata = {'name': 'xxx', 'version': 'xxx'} + metadata = {'name': 'xxx', 'version': '1.2'} pkg_info, dist = self.create_dist(**metadata) cmd = check(dist) cmd.initialize_options() @@ -28,66 +28,70 @@ # by default, check is checking the metadata # should have some warnings cmd = self._run() - self.assertTrue(len(cmd._warnings) > 0) + # trick: using assertNotEqual with an empty list will give us a more + # useful error message than assertGreater(.., 0) when the code change + # and the test fails + self.assertNotEqual([], self.get_logs(logging.WARNING)) # now let's add the required fields # and run it again, to make sure we don't get # any warning anymore metadata = {'home_page': 'xxx', 'author': 'xxx', 'author_email': 'xxx', - 'name': 'xxx', 'version': 'xxx', + 'name': 'xxx', 'version': '4.2', } cmd = self._run(metadata) - self.assertEqual(len(cmd._warnings), 0) + self.assertEqual([], self.get_logs(logging.WARNING)) # now with the strict mode, we should # get an error if there are missing metadata self.assertRaises(MetadataMissingError, self._run, {}, **{'strict': 1}) - self.assertRaises(DistutilsSetupError, 
self._run, + self.assertRaises(PackagingSetupError, self._run, {'name': 'xxx', 'version': 'xxx'}, **{'strict': 1}) + # clear warnings from the previous calls + self.loghandler.flush() + # and of course, no error when all metadata fields are present - cmd = self._run(metadata, strict=1) - self.assertEqual(len(cmd._warnings), 0) + cmd = self._run(metadata, strict=True) + self.assertEqual([], self.get_logs(logging.WARNING)) def test_check_metadata_1_2(self): # let's run the command with no metadata at all # by default, check is checking the metadata # should have some warnings cmd = self._run() - self.assertTrue(len(cmd._warnings) > 0) + self.assertNotEqual([], self.get_logs(logging.WARNING)) - # now let's add the required fields - # and run it again, to make sure we don't get - # any warning anymore - # let's use requires_python as a marker to enforce - # Metadata-Version 1.2 + # now let's add the required fields and run it again, to make sure we + # don't get any warning anymore let's use requires_python as a marker + # to enforce Metadata-Version 1.2 metadata = {'home_page': 'xxx', 'author': 'xxx', 'author_email': 'xxx', - 'name': 'xxx', 'version': 'xxx', + 'name': 'xxx', 'version': '4.2', 'requires_python': '2.4', } cmd = self._run(metadata) - self.assertEqual(len(cmd._warnings), 1) + self.assertEqual([], self.get_logs(logging.WARNING)) # now with the strict mode, we should # get an error if there are missing metadata self.assertRaises(MetadataMissingError, self._run, {}, **{'strict': 1}) - self.assertRaises(DistutilsSetupError, self._run, + self.assertRaises(PackagingSetupError, self._run, {'name': 'xxx', 'version': 'xxx'}, **{'strict': 1}) # complain about version format - self.assertRaises(DistutilsSetupError, self._run, metadata, + metadata['version'] = 'xxx' + self.assertRaises(PackagingSetupError, self._run, metadata, **{'strict': 1}) - # now with correct version format - metadata = {'home_page': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 
'xxx', 'version': '1.2', - 'requires_python': '2.4', - } - cmd = self._run(metadata, strict=1) - self.assertEqual(len(cmd._warnings), 0) + # clear warnings from the previous calls + self.loghandler.flush() + + # now with correct version format again + metadata['version'] = '4.2' + cmd = self._run(metadata, strict=True) + self.assertEqual([], self.get_logs(logging.WARNING)) @unittest.skipUnless(_HAS_DOCUTILS, "requires docutils") def test_check_restructuredtext(self): @@ -96,16 +100,15 @@ pkg_info, dist = self.create_dist(description=broken_rest) cmd = check(dist) cmd.check_restructuredtext() - self.assertEqual(len(cmd._warnings), 1) + self.assertEqual(len(self.get_logs(logging.WARNING)), 1) pkg_info, dist = self.create_dist(description='title\n=====\n\ntest') cmd = check(dist) cmd.check_restructuredtext() - self.assertEqual(len(cmd._warnings), 0) + self.assertEqual([], self.get_logs(logging.WARNING)) def test_check_all(self): - - self.assertRaises(DistutilsSetupError, self._run, + self.assertRaises(PackagingSetupError, self._run, {'name': 'xxx', 'version': 'xxx'}, **{'strict': 1, 'all': 1}) self.assertRaises(MetadataMissingError, self._run, @@ -119,7 +122,18 @@ } cmd = check(dist) cmd.check_hooks_resolvable() - self.assertEqual(len(cmd._warnings), 1) + self.assertEqual(len(self.get_logs(logging.WARNING)), 1) + + def test_warn(self): + _, dist = self.create_dist() + cmd = check(dist) + self.assertEqual([], self.get_logs()) + cmd.warn('hello') + self.assertEqual(['check: hello'], self.get_logs()) + cmd.warn('hello %s', 'world') + self.assertEqual(['check: hello world'], self.get_logs()) + cmd.warn('hello %s %s', 'beautiful', 'world') + self.assertEqual(['check: hello beautiful world'], self.get_logs()) def test_suite(): diff --git a/distutils2/tests/test_command_clean.py b/distutils2/tests/test_command_clean.py --- a/distutils2/tests/test_command_clean.py +++ b/distutils2/tests/test_command_clean.py @@ -4,8 +4,8 @@ from distutils2.command.clean import clean from 
distutils2.tests import unittest, support -class cleanTestCase(support.TempdirManager, - support.LoggingCatcher, + +class cleanTestCase(support.TempdirManager, support.LoggingCatcher, unittest.TestCase): def test_simple_run(self): @@ -26,20 +26,21 @@ self.write_file(os.path.join(path, f)) # let's run the command - cmd.all = 1 + cmd.all = True cmd.ensure_finalized() cmd.run() # make sure the files where removed for name, path in dirs: - self.assertTrue(not os.path.exists(path), - '%s was not removed' % path) + self.assertFalse(os.path.exists(path), + '%r was not removed' % path) - # let's run the command again (should spit warnings but suceed) - cmd.all = 1 + # let's run the command again (should spit warnings but succeed) + cmd.all = True cmd.ensure_finalized() cmd.run() + def test_suite(): return unittest.makeSuite(cleanTestCase) diff --git a/distutils2/tests/test_command_cmd.py b/distutils2/tests/test_command_cmd.py --- a/distutils2/tests/test_command_cmd.py +++ b/distutils2/tests/test_command_cmd.py @@ -1,24 +1,26 @@ """Tests for distutils.cmd.""" import os -from distutils2.tests import run_unittest from distutils2.command.cmd import Command from distutils2.dist import Distribution -from distutils2.errors import DistutilsOptionError -from distutils2.tests import unittest +from distutils2.errors import PackagingOptionError +from distutils2.tests import support, unittest + class MyCmd(Command): def initialize_options(self): pass -class CommandTestCase(unittest.TestCase): + +class CommandTestCase(support.LoggingCatcher, + unittest.TestCase): def setUp(self): + super(CommandTestCase, self).setUp() dist = Distribution() self.cmd = MyCmd(dist) def test_make_file(self): - cmd = self.cmd # making sure it raises when infiles is not a string or a list/tuple @@ -33,12 +35,7 @@ cmd.make_file(infiles='in', outfile='out', func='func', args=()) def test_dump_options(self): - - msgs = [] - def _announce(msg, level): - msgs.append(msg) cmd = self.cmd - cmd.announce = _announce 
cmd.option1 = 1 cmd.option2 = 1 cmd.user_options = [('option1', '', ''), ('option2', '', '')] @@ -46,6 +43,7 @@ wanted = ["command options for 'MyCmd':", ' option1 = 1', ' option2 = 1'] + msgs = self.get_logs() self.assertEqual(msgs, wanted) def test_ensure_string(self): @@ -58,7 +56,7 @@ self.assertTrue(hasattr(cmd, 'option2')) cmd.option3 = 1 - self.assertRaises(DistutilsOptionError, cmd.ensure_string, 'option3') + self.assertRaises(PackagingOptionError, cmd.ensure_string, 'option3') def test_ensure_string_list(self): cmd = self.cmd @@ -75,10 +73,10 @@ cmd.not_string_list = ['one', 2, 'three'] cmd.not_string_list2 = object() - self.assertRaises(DistutilsOptionError, + self.assertRaises(PackagingOptionError, cmd.ensure_string_list, 'not_string_list') - self.assertRaises(DistutilsOptionError, + self.assertRaises(PackagingOptionError, cmd.ensure_string_list, 'not_string_list2') def test_ensure_filename(self): @@ -86,17 +84,18 @@ cmd.option1 = __file__ cmd.ensure_filename('option1') cmd.option2 = 'xxx' - self.assertRaises(DistutilsOptionError, cmd.ensure_filename, 'option2') + self.assertRaises(PackagingOptionError, cmd.ensure_filename, 'option2') def test_ensure_dirname(self): cmd = self.cmd cmd.option1 = os.path.dirname(__file__) or os.curdir cmd.ensure_dirname('option1') cmd.option2 = 'xxx' - self.assertRaises(DistutilsOptionError, cmd.ensure_dirname, 'option2') + self.assertRaises(PackagingOptionError, cmd.ensure_dirname, 'option2') + def test_suite(): return unittest.makeSuite(CommandTestCase) if __name__ == '__main__': - run_unittest(test_suite()) + unittest.main(defaultTest='test_suite') diff --git a/distutils2/tests/test_command_config.py b/distutils2/tests/test_command_config.py --- a/distutils2/tests/test_command_config.py +++ b/distutils2/tests/test_command_config.py @@ -1,32 +1,30 @@ """Tests for distutils.command.config.""" import os import sys +import logging from distutils2.command.config import dump_file, config from distutils2.tests import unittest, 
support + class ConfigTestCase(support.LoggingCatcher, support.TempdirManager, unittest.TestCase): def test_dump_file(self): - this_file = os.path.splitext(__file__)[0] + '.py' - f = open(this_file) - try: + this_file = __file__.rstrip('co') + with open(this_file) as f: numlines = len(f.readlines()) - finally: - f.close() dump_file(this_file, 'I am the header') + logs = [] - for log in self.logs: - log = log[1] - logs.extend([log for log in log.split('\n')]) - self.assertEqual(len(logs), numlines+2) + for log in self.get_logs(logging.INFO): + logs.extend(line for line in log.split('\n')) + self.assertEqual(len(logs), numlines + 2) + @unittest.skipIf(sys.platform == 'win32', 'disabled on win32') def test_search_cpp(self): - if sys.platform == 'win32': - return pkg_dir, dist = self.create_dist() cmd = config(dist) @@ -68,7 +66,8 @@ cmd._clean(f1, f2) for f in (f1, f2): - self.assertTrue(not os.path.exists(f)) + self.assertFalse(os.path.exists(f)) + def test_suite(): return unittest.makeSuite(ConfigTestCase) diff --git a/distutils2/tests/test_command_install_data.py b/distutils2/tests/test_command_install_data.py --- a/distutils2/tests/test_command_install_data.py +++ b/distutils2/tests/test_command_install_data.py @@ -1,30 +1,33 @@ -"""Tests for distutils.command.install_data.""" -import cmd +"""Tests for distutils2.command.install_data.""" import os +import sysconfig +from sysconfig import _get_default_scheme +from distutils2.tests import unittest, support +from distutils2.command.install_data import install_data -from distutils2.command.install_data import install_data -from distutils2.tests import unittest, support class InstallDataTestCase(support.TempdirManager, support.LoggingCatcher, - support.EnvironGuard, unittest.TestCase): def test_simple_run(self): - from distutils2._backport.sysconfig import _SCHEMES as sysconfig_SCHEMES - from distutils2._backport.sysconfig import _get_default_scheme - #dirty but hit marmoute - - old_scheme = sysconfig_SCHEMES + scheme 
= _get_default_scheme() + old_items = sysconfig._SCHEMES.items(scheme) + def restore(): + sysconfig._SCHEMES.remove_section(scheme) + sysconfig._SCHEMES.add_section(scheme) + for option, value in old_items: + sysconfig._SCHEMES.set(scheme, option, value) + self.addCleanup(restore) pkg_dir, dist = self.create_dist() cmd = install_data(dist) cmd.install_dir = inst = os.path.join(pkg_dir, 'inst') - sysconfig_SCHEMES.set(_get_default_scheme(), 'inst', - os.path.join(pkg_dir, 'inst')) - sysconfig_SCHEMES.set(_get_default_scheme(), 'inst2', - os.path.join(pkg_dir, 'inst2')) + sysconfig._SCHEMES.set(scheme, 'inst', + os.path.join(pkg_dir, 'inst')) + sysconfig._SCHEMES.set(scheme, 'inst2', + os.path.join(pkg_dir, 'inst2')) one = os.path.join(pkg_dir, 'one') self.write_file(one, 'xxx') @@ -32,7 +35,7 @@ two = os.path.join(pkg_dir, 'two') self.write_file(two, 'xxx') - cmd.data_files = {one : '{inst}/one', two : '{inst2}/two'} + cmd.data_files = {one: '{inst}/one', two: '{inst2}/two'} self.assertItemsEqual(cmd.get_inputs(), [one, two]) # let's run the command @@ -48,7 +51,7 @@ cmd.outfiles = [] # let's try with warn_dir one - cmd.warn_dir = 1 + cmd.warn_dir = True cmd.ensure_finalized() cmd.run() @@ -60,15 +63,14 @@ # now using root and empty dir cmd.root = os.path.join(pkg_dir, 'root') - inst4 = os.path.join(pkg_dir, 'inst4') three = os.path.join(cmd.install_dir, 'three') self.write_file(three, 'xx') - sysconfig_SCHEMES.set(_get_default_scheme(), 'inst3', cmd.install_dir) + sysconfig._SCHEMES.set(scheme, 'inst3', + cmd.install_dir) - cmd.data_files = {one : '{inst}/one', - two : '{inst2}/two', - three : '{inst3}/three'} + cmd.data_files = {one: '{inst}/one', two: '{inst2}/two', + three: '{inst3}/three'} cmd.ensure_finalized() cmd.run() @@ -77,7 +79,6 @@ self.assertTrue(os.path.exists(os.path.join(inst2, rtwo))) self.assertTrue(os.path.exists(os.path.join(inst, rone))) - sysconfig_SCHEMES = old_scheme def test_suite(): return unittest.makeSuite(InstallDataTestCase) diff --git 
a/distutils2/tests/test_command_install_dist.py b/distutils2/tests/test_command_install_dist.py --- a/distutils2/tests/test_command_install_dist.py +++ b/distutils2/tests/test_command_install_dist.py @@ -1,12 +1,10 @@ -"""Tests for distutils.command.install.""" +"""Tests for distutils2.command.install.""" import os import sys -from distutils2._backport.sysconfig import (get_scheme_names, - get_config_vars, - _SCHEMES, - get_config_var, get_path) +from sysconfig import (get_scheme_names, get_config_vars, + _SCHEMES, get_config_var, get_path) _CONFIG_VARS = get_config_vars() @@ -15,13 +13,12 @@ from distutils2.command.install_dist import install_dist from distutils2.command import install_dist as install_module from distutils2.dist import Distribution -from distutils2.errors import DistutilsOptionError +from distutils2.errors import PackagingOptionError from distutils2.tests import unittest, support class InstallTestCase(support.TempdirManager, - support.EnvironGuard, support.LoggingCatcher, unittest.TestCase): @@ -33,12 +30,10 @@ destination = os.path.join(builddir, "installation") dist = Distribution({"name": "foopkg"}) - # script_name need not exist, it just need to be initialized - dist.script_name = os.path.join(builddir, "setup.py") dist.command_obj["build"] = support.DummyCommand( build_base=builddir, build_lib=os.path.join(builddir, "lib"), - ) + ) old_posix_prefix = _SCHEMES.get('posix_prefix', 'platinclude') old_posix_home = _SCHEMES.get('posix_home', 'platinclude') @@ -104,21 +99,21 @@ def _test_user_site(self): schemes = get_scheme_names() for key in ('nt_user', 'posix_user', 'os2_home'): - self.assertTrue(key in schemes) + self.assertIn(key, schemes) dist = Distribution({'name': 'xx'}) cmd = install_dist(dist) # making sure the user option is there options = [name for name, short, lable in cmd.user_options] - self.assertTrue('user' in options) + self.assertIn('user', options) # setting a value - cmd.user = 1 + cmd.user = True # user base and site 
shouldn't be created yet - self.assertTrue(not os.path.exists(self.user_base)) - self.assertTrue(not os.path.exists(self.user_site)) + self.assertFalse(os.path.exists(self.user_base)) + self.assertFalse(os.path.exists(self.user_site)) # let's run finalize cmd.ensure_finalized() @@ -127,8 +122,8 @@ self.assertTrue(os.path.exists(self.user_base)) self.assertTrue(os.path.exists(self.user_site)) - self.assertTrue('userbase' in cmd.config_vars) - self.assertTrue('usersite' in cmd.config_vars) + self.assertIn('userbase', cmd.config_vars) + self.assertIn('usersite', cmd.config_vars) def test_handle_extra_path(self): dist = Distribution({'name': 'xx', 'extra_path': 'path,dirs'}) @@ -156,7 +151,7 @@ # three elements (no way !) cmd.extra_path = 'path,dirs,again' - self.assertRaises(DistutilsOptionError, cmd.handle_extra_path) + self.assertRaises(PackagingOptionError, cmd.handle_extra_path) def test_finalize_options(self): dist = Distribution({'name': 'xx'}) @@ -166,19 +161,19 @@ # install-base/install-platbase -- not both cmd.prefix = 'prefix' cmd.install_base = 'base' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) + self.assertRaises(PackagingOptionError, cmd.finalize_options) # must supply either home or prefix/exec-prefix -- not both cmd.install_base = None cmd.home = 'home' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) + self.assertRaises(PackagingOptionError, cmd.finalize_options) if sys.version >= '2.6': # can't combine user with with prefix/exec_prefix/home or # install_(plat)base cmd.prefix = None cmd.user = 'user' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) + self.assertRaises(PackagingOptionError, cmd.finalize_options) def test_old_record(self): # test pre-PEP 376 --record option (outside dist-info dir) @@ -196,25 +191,12 @@ # let's check the record file was created with four # lines, one for each .dist-info entry: METADATA, # INSTALLER, REQUSTED, RECORD - f = open(cmd.record) - try: + with open(cmd.record) as 
f: self.assertEqual(len(f.readlines()), 4) - finally: - f.close() # XXX test that fancy_getopt is okay with options named # record and no-record but unrelated - def _test_debug_mode(self): - # this covers the code called when DEBUG is set - old_logs_len = len(self.logs) - install_module.DEBUG = True - try: - __, stdout = captured_stdout(self.test_record) - finally: - install_module.DEBUG = False - self.assertTrue(len(self.logs) > old_logs_len) - def test_suite(): return unittest.makeSuite(InstallTestCase) diff --git a/distutils2/tests/test_command_install_distinfo.py b/distutils2/tests/test_command_install_distinfo.py --- a/distutils2/tests/test_command_install_distinfo.py +++ b/distutils2/tests/test_command_install_distinfo.py @@ -2,17 +2,14 @@ import os import csv +import hashlib +import sys from distutils2.command.install_distinfo import install_distinfo from distutils2.command.cmd import Command from distutils2.metadata import Metadata from distutils2.tests import unittest, support -try: - import hashlib -except ImportError: - from distutils2._backport import hashlib - class DummyInstallCmd(Command): @@ -21,19 +18,18 @@ self.distribution = dist def __getattr__(self, name): - return None + return None def ensure_finalized(self): pass def get_outputs(self): - return self.outputs + \ - self.get_finalized_command('install_distinfo').get_outputs() + return (self.outputs + + self.get_finalized_command('install_distinfo').get_outputs()) class InstallDistinfoTestCase(support.TempdirManager, support.LoggingCatcher, - support.EnvironGuard, unittest.TestCase): checkLists = lambda self, x, y: self.assertListEqual(sorted(x), sorted(y)) @@ -59,10 +55,10 @@ dist_info = os.path.join(install_dir, 'foo-1.0.dist-info') self.checkLists(os.listdir(dist_info), ['METADATA', 'RECORD', 'REQUESTED', 'INSTALLER']) - self.assertEqual(open(os.path.join(dist_info, 'INSTALLER')).read(), - 'distutils') - self.assertEqual(open(os.path.join(dist_info, 'REQUESTED')).read(), - '') + with 
open(os.path.join(dist_info, 'INSTALLER')) as fp: + self.assertEqual(fp.read(), 'distutils') + with open(os.path.join(dist_info, 'REQUESTED')) as fp: + self.assertEqual(fp.read(), '') meta_path = os.path.join(dist_info, 'METADATA') self.assertTrue(Metadata(path=meta_path).check()) @@ -84,8 +80,8 @@ cmd.run() dist_info = os.path.join(install_dir, 'foo-1.0.dist-info') - self.assertEqual(open(os.path.join(dist_info, 'INSTALLER')).read(), - 'bacon-python') + with open(os.path.join(dist_info, 'INSTALLER')) as fp: + self.assertEqual(fp.read(), 'bacon-python') def test_requested(self): pkg_dir, dist = self.create_dist(name='foo', @@ -137,25 +133,23 @@ install = DummyInstallCmd(dist) dist.command_obj['install_dist'] = install - fake_dists = os.path.join(os.path.dirname(__file__), '..', - '_backport', 'tests', 'fake_dists') + fake_dists = os.path.join(os.path.dirname(__file__), 'fake_dists') fake_dists = os.path.realpath(fake_dists) # for testing, we simply add all files from _backport's fake_dists dirs = [] for dir in os.listdir(fake_dists): - full_path = os.path.join(fake_dists, dir) - if (not dir.endswith('.egg') or dir.endswith('.egg-info') or - dir.endswith('.dist-info')) and os.path.isdir(full_path): - dirs.append(full_path) + full_path = os.path.join(fake_dists, dir) + if (not dir.endswith('.egg') or dir.endswith('.egg-info') or + dir.endswith('.dist-info')) and os.path.isdir(full_path): + dirs.append(full_path) for dir in dirs: - for (path, subdirs, files) in os.walk(dir): + for path, subdirs, files in os.walk(dir): install.outputs += [os.path.join(path, f) for f in files] install.outputs += [os.path.join('path', f + 'c') for f in files if f.endswith('.py')] - cmd = install_distinfo(dist) dist.command_obj['install_distinfo'] = cmd @@ -168,25 +162,23 @@ expected = [] for f in install.get_outputs(): - if f.endswith('.pyc') or \ - f == os.path.join(install_dir, 'foo-1.0.dist-info', 'RECORD'): + if (f.endswith(('.pyc', '.pyo')) or f == os.path.join( + install_dir, 
'foo-1.0.dist-info', 'RECORD')): expected.append([f, '', '']) else: size = os.path.getsize(f) md5 = hashlib.md5() - md5.update(open(f).read()) + with open(f, 'rb') as fp: + md5.update(fp.read()) hash = md5.hexdigest() expected.append([f, hash, str(size)]) parsed = [] - f = open(os.path.join(dist_info, 'RECORD'), 'rb') - try: + with open(os.path.join(dist_info, 'RECORD'), 'r') as f: reader = csv.reader(f, delimiter=',', lineterminator=os.linesep, quotechar='"') parsed = list(reader) - finally: - f.close() self.maxDiff = None self.checkLists(parsed, expected) diff --git a/distutils2/tests/test_command_install_headers.py b/distutils2/tests/test_command_install_headers.py --- a/distutils2/tests/test_command_install_headers.py +++ b/distutils2/tests/test_command_install_headers.py @@ -1,12 +1,12 @@ -"""Tests for distutils.command.install_headers.""" +"""Tests for distutils2.command.install_headers.""" import os from distutils2.command.install_headers import install_headers from distutils2.tests import unittest, support + class InstallHeadersTestCase(support.TempdirManager, support.LoggingCatcher, - support.EnvironGuard, unittest.TestCase): def test_simple_run(self): @@ -30,6 +30,7 @@ # let's check the results self.assertEqual(len(cmd.get_outputs()), 2) + def test_suite(): return unittest.makeSuite(InstallHeadersTestCase) diff --git a/distutils2/tests/test_command_install_lib.py b/distutils2/tests/test_command_install_lib.py --- a/distutils2/tests/test_command_install_lib.py +++ b/distutils2/tests/test_command_install_lib.py @@ -1,11 +1,11 @@ -"""Tests for distutils.command.install_data.""" +"""Tests for distutils2.command.install_data.""" import sys import os +from distutils2.tests import unittest, support from distutils2.command.install_lib import install_lib from distutils2.compiler.extension import Extension -from distutils2.tests import unittest, support -from distutils2.errors import DistutilsOptionError +from distutils2.errors import PackagingOptionError try: 
no_bytecode = sys.dont_write_bytecode @@ -14,24 +14,27 @@ no_bytecode = False bytecode_support = False + class InstallLibTestCase(support.TempdirManager, support.LoggingCatcher, - support.EnvironGuard, + support.EnvironRestorer, unittest.TestCase): + restore_environ = ['PYTHONPATH'] + def test_finalize_options(self): pkg_dir, dist = self.create_dist() cmd = install_lib(dist) cmd.finalize_options() - self.assertEqual(cmd.compile, 1) + self.assertTrue(cmd.compile) self.assertEqual(cmd.optimize, 0) # optimize must be 0, 1, or 2 cmd.optimize = 'foo' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) + self.assertRaises(PackagingOptionError, cmd.finalize_options) cmd.optimize = '4' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) + self.assertRaises(PackagingOptionError, cmd.finalize_options) cmd.optimize = '2' cmd.finalize_options() @@ -41,7 +44,8 @@ def test_byte_compile(self): pkg_dir, dist = self.create_dist() cmd = install_lib(dist) - cmd.compile = cmd.optimize = 1 + cmd.compile = True + cmd.optimize = 1 f = os.path.join(pkg_dir, 'foo.py') self.write_file(f, '# python file') @@ -54,29 +58,33 @@ cmd = install_lib(dist) # setting up a dist environment - cmd.compile = cmd.optimize = 1 + cmd.compile = True + cmd.optimize = 1 cmd.install_dir = pkg_dir f = os.path.join(pkg_dir, '__init__.py') self.write_file(f, '# python package') cmd.distribution.ext_modules = [Extension('foo', ['xxx'])] cmd.distribution.packages = [pkg_dir] - cmd.distribution.script_name = 'setup.py' + + # make sure the build_lib is set the temp dir + build_dir = os.path.split(pkg_dir)[0] + cmd.get_finalized_command('build_py').build_lib = build_dir # get_output should return 4 elements - self.assertTrue(len(cmd.get_outputs()) >= 2) + self.assertEqual(len(cmd.get_outputs()), 4) def test_get_inputs(self): pkg_dir, dist = self.create_dist() cmd = install_lib(dist) # setting up a dist environment - cmd.compile = cmd.optimize = 1 + cmd.compile = True + cmd.optimize = 1 
cmd.install_dir = pkg_dir f = os.path.join(pkg_dir, '__init__.py') self.write_file(f, '# python package') cmd.distribution.ext_modules = [Extension('foo', ['xxx'])] cmd.distribution.packages = [pkg_dir] - cmd.distribution.script_name = 'setup.py' # get_input should return 2 elements self.assertEqual(len(cmd.get_inputs()), 2) @@ -87,17 +95,16 @@ # makes sure byte_compile is not used pkg_dir, dist = self.create_dist() cmd = install_lib(dist) - cmd.compile = 1 + cmd.compile = True cmd.optimize = 1 - old_dont_write_bytecode = sys.dont_write_bytecode + self.addCleanup(setattr, sys, 'dont_write_bytecode', + sys.dont_write_bytecode) sys.dont_write_bytecode = True - try: - cmd.byte_compile([]) - finally: - sys.dont_write_bytecode = old_dont_write_bytecode + cmd.byte_compile([]) - self.assertIn('byte-compiling is disabled', self.logs[0][2][1]) + self.assertIn('byte-compiling is disabled', self.get_logs()[0]) + def test_suite(): return unittest.makeSuite(InstallLibTestCase) diff --git a/distutils2/tests/test_command_install_scripts.py b/distutils2/tests/test_command_install_scripts.py --- a/distutils2/tests/test_command_install_scripts.py +++ b/distutils2/tests/test_command_install_scripts.py @@ -1,12 +1,10 @@ -"""Tests for distutils.command.install_scripts.""" - +"""Tests for distutils2.command.install_scripts.""" import os +from distutils2.tests import unittest, support from distutils2.command.install_scripts import install_scripts from distutils2.dist import Distribution -from distutils2.tests import unittest, support - class InstallScriptsTestCase(support.TempdirManager, support.LoggingCatcher, @@ -18,14 +16,14 @@ build_scripts="/foo/bar") dist.command_obj["install_dist"] = support.DummyCommand( install_scripts="/splat/funk", - force=1, - skip_build=1, + force=True, + skip_build=True, ) cmd = install_scripts(dist) - self.assertTrue(not cmd.force) - self.assertTrue(not cmd.skip_build) - self.assertTrue(cmd.build_dir is None) - self.assertTrue(cmd.install_dir is None) + 
self.assertFalse(cmd.force) + self.assertFalse(cmd.skip_build) + self.assertIs(cmd.build_dir, None) + self.assertIs(cmd.install_dir, None) cmd.finalize_options() @@ -40,11 +38,8 @@ def write_script(name, text): expected.append(name) - f = open(os.path.join(source, name), "w") - try: + with open(os.path.join(source, name), "w") as f: f.write(text) - finally: - f.close() write_script("script1.py", ("#! /usr/bin/env python2.3\n" "# bogus script w/ Python sh-bang\n" @@ -61,8 +56,8 @@ dist.command_obj["build"] = support.DummyCommand(build_scripts=source) dist.command_obj["install_dist"] = support.DummyCommand( install_scripts=target, - force=1, - skip_build=1, + force=True, + skip_build=True, ) cmd = install_scripts(dist) cmd.finalize_options() @@ -70,7 +65,7 @@ installed = os.listdir(target) for name in expected: - self.assertTrue(name in installed) + self.assertIn(name, installed) def test_suite(): diff --git a/distutils2/tests/test_command_register.py b/distutils2/tests/test_command_register.py --- a/distutils2/tests/test_command_register.py +++ b/distutils2/tests/test_command_register.py @@ -1,21 +1,18 @@ -# -*- encoding: utf-8 -*- -"""Tests for distutils.command.register.""" +"""Tests for distutils2.command.register.""" import os import getpass import urllib2 - try: import docutils DOCUTILS_SUPPORT = True except ImportError: DOCUTILS_SUPPORT = False +from distutils2.tests import unittest, support from distutils2.command import register as register_module from distutils2.command.register import register -from distutils2.dist import Distribution -from distutils2.errors import DistutilsSetupError +from distutils2.errors import PackagingSetupError -from distutils2.tests import unittest, support PYPIRC_NOPASSWORD = """\ [distutils] @@ -37,7 +34,8 @@ password:password """ -class RawInputs(object): + +class Inputs(object): """Fakes user inputs.""" def __init__(self, *answers): self.answers = answers @@ -49,6 +47,7 @@ finally: self.index += 1 + class FakeOpener(object): 
"""Fakes a PyPI server""" def __init__(self): @@ -64,11 +63,14 @@ def read(self): return 'xxx' + class RegisterTestCase(support.TempdirManager, - support.EnvironGuard, + support.EnvironRestorer, support.LoggingCatcher, unittest.TestCase): + restore_environ = ['HOME'] + def setUp(self): super(RegisterTestCase, self).setUp() self.tmp_dir = self.mkdtemp() @@ -77,8 +79,10 @@ # patching the password prompt self._old_getpass = getpass.getpass + def _getpass(prompt): return 'password' + getpass.getpass = _getpass self.old_opener = urllib2.build_opener self.conn = urllib2.build_opener = FakeOpener() @@ -86,8 +90,8 @@ def tearDown(self): getpass.getpass = self._old_getpass urllib2.build_opener = self.old_opener - if hasattr(register_module, 'raw_input'): - del register_module.raw_input + if hasattr(register_module, 'input'): + del register_module.input super(RegisterTestCase, self).tearDown() def _get_cmd(self, metadata=None): @@ -106,24 +110,26 @@ cmd = self._get_cmd() # we shouldn't have a .pypirc file yet - self.assertTrue(not os.path.exists(self.rc)) + self.assertFalse(os.path.exists(self.rc)) - # patching raw_input and getpass.getpass + # patching input and getpass.getpass # so register gets happy # Here's what we are faking : # use your existing login (choice 1.) # Username : 'tarek' # Password : 'password' # Save your login (y/N)? 
: 'y' - inputs = RawInputs('1', 'tarek', 'y') - register_module.raw_input = inputs.__call__ + inputs = Inputs('1', 'tarek', 'y') + register_module.input = inputs + cmd.ensure_finalized() cmd.run() # we should have a brand new .pypirc file self.assertTrue(os.path.exists(self.rc)) # with the content similar to WANTED_PYPIRC - content = open(self.rc).read() + with open(self.rc) as fp: + content = fp.read() self.assertEqual(content, WANTED_PYPIRC) # now let's make sure the .pypirc file generated @@ -132,17 +138,18 @@ def _no_way(prompt=''): raise AssertionError(prompt) - register_module.raw_input = _no_way - cmd.show_response = 1 + register_module.input = _no_way + cmd.show_response = True + cmd.ensure_finalized() cmd.run() # let's see what the server received : we should # have 2 similar requests - self.assertTrue(self.conn.reqs, 2) + self.assertEqual(len(self.conn.reqs), 2) req1 = dict(self.conn.reqs[0].headers) req2 = dict(self.conn.reqs[1].headers) self.assertEqual(req2['Content-length'], req1['Content-length']) - self.assertTrue('xxx' in self.conn.reqs[1].data) + self.assertIn(b'xxx', self.conn.reqs[1].data) def test_password_not_in_file(self): @@ -159,33 +166,34 @@ def test_registration(self): # this test runs choice 2 cmd = self._get_cmd() - inputs = RawInputs('2', 'tarek', 'tarek at ziade.org') - register_module.raw_input = inputs.__call__ + inputs = Inputs('2', 'tarek', 'tarek at ziade.org') + register_module.input = inputs # let's run the command # FIXME does this send a real request? 
use a mock server - # also, silence self.announce (with LoggingCatcher) + cmd.ensure_finalized() cmd.run() # we should have send a request - self.assertTrue(self.conn.reqs, 1) + self.assertEqual(len(self.conn.reqs), 1) req = self.conn.reqs[0] headers = dict(req.headers) - self.assertEqual(headers['Content-length'], '608') - self.assertTrue('tarek' in req.data) + self.assertEqual(headers['Content-length'], '628') + self.assertIn(b'tarek', req.data) def test_password_reset(self): # this test runs choice 3 cmd = self._get_cmd() - inputs = RawInputs('3', 'tarek at ziade.org') - register_module.raw_input = inputs.__call__ + inputs = Inputs('3', 'tarek at ziade.org') + register_module.input = inputs + cmd.ensure_finalized() cmd.run() # we should have send a request - self.assertTrue(self.conn.reqs, 1) + self.assertEqual(len(self.conn.reqs), 1) req = self.conn.reqs[0] headers = dict(req.headers) - self.assertEqual(headers['Content-length'], '290') - self.assertTrue('tarek' in req.data) + self.assertEqual(headers['Content-length'], '298') + self.assertIn(b'tarek', req.data) @unittest.skipUnless(DOCUTILS_SUPPORT, 'needs docutils') def test_strict(self): @@ -197,37 +205,39 @@ # empty metadata cmd = self._get_cmd({'name': 'xxx', 'version': 'xxx'}) cmd.ensure_finalized() - cmd.strict = 1 - inputs = RawInputs('1', 'tarek', 'y') - register_module.raw_input = inputs.__call__ - self.assertRaises(DistutilsSetupError, cmd.run) + cmd.strict = True + inputs = Inputs('1', 'tarek', 'y') + register_module.input = inputs + self.assertRaises(PackagingSetupError, cmd.run) # metadata is OK but long_description is broken metadata = {'home_page': 'xxx', 'author': 'xxx', - 'author_email': u'??x??x??', + 'author_email': '\xc3x\xc3x\xc3', 'name': 'xxx', 'version': 'xxx', 'description': 'title\n==\n\ntext'} cmd = self._get_cmd(metadata) cmd.ensure_finalized() - cmd.strict = 1 + cmd.strict = True - self.assertRaises(DistutilsSetupError, cmd.run) + self.assertRaises(PackagingSetupError, cmd.run) # 
now something that works metadata['description'] = 'title\n=====\n\ntext' cmd = self._get_cmd(metadata) cmd.ensure_finalized() - cmd.strict = 1 - inputs = RawInputs('1', 'tarek', 'y') - register_module.raw_input = inputs.__call__ + cmd.strict = True + inputs = Inputs('1', 'tarek', 'y') + register_module.input = inputs + cmd.ensure_finalized() cmd.run() # strict is not by default cmd = self._get_cmd() cmd.ensure_finalized() - inputs = RawInputs('1', 'tarek', 'y') - register_module.raw_input = inputs.__call__ + inputs = Inputs('1', 'tarek', 'y') + register_module.input = inputs + cmd.ensure_finalized() cmd.run() def test_register_pep345(self): @@ -238,6 +248,7 @@ self.assertEqual(data['metadata_version'], '1.2') self.assertEqual(data['requires_dist'], ['lxml']) + def test_suite(): return unittest.makeSuite(RegisterTestCase) diff --git a/distutils2/tests/test_command_sdist.py b/distutils2/tests/test_command_sdist.py --- a/distutils2/tests/test_command_sdist.py +++ b/distutils2/tests/test_command_sdist.py @@ -1,16 +1,10 @@ -"""Tests for distutils.command.sdist.""" +"""Tests for distutils2.command.sdist.""" import os -import shutil import zipfile import tarfile import logging -# zlib is not used here, but if it's not available -# the tests that use zipfile may fail -try: - import zlib -except ImportError: - zlib = None +from distutils2.tests.support import requires_zlib try: import grp @@ -20,32 +14,20 @@ UID_GID_SUPPORT = False from os.path import join -import sys - from distutils2.tests import captured_stdout - from distutils2.command.sdist import sdist from distutils2.command.sdist import show_formats from distutils2.dist import Distribution from distutils2.tests import unittest -from distutils2.errors import DistutilsExecError, DistutilsOptionError -from distutils2.util import find_executable +from distutils2.errors import PackagingOptionError +from distutils2.util import find_executable, get_archive_formats from distutils2.tests import support -try: - from shutil 
import get_archive_formats -except ImportError: - from distutils2._backport.shutil import get_archive_formats -SETUP_PY = """ -from distutils.core import setup -import somecode - -setup(name='fake') -""" MANIFEST = """\ -# file GENERATED by distutils, do NOT edit +# file GENERATED by distutils2, do NOT edit inroot.txt +setup.cfg data%(sep)sdata.dt scripts%(sep)sscript.py some%(sep)sfile.txt @@ -55,16 +37,19 @@ somecode%(sep)sdoc.txt """ + def builder(dist, filelist): filelist.append('bah') -class SDistTestCase(support.TempdirManager, support.LoggingCatcher, - support.EnvironGuard, unittest.TestCase): +class SDistTestCase(support.TempdirManager, + support.LoggingCatcher, + support.EnvironRestorer, + unittest.TestCase): + + restore_environ = ['HOME'] def setUp(self): - # PyPIRCCommandTestCase creates a temp dir already - # and put it in self.tmp_dir super(SDistTestCase, self).setUp() self.tmp_dir = self.mkdtemp() os.environ['HOME'] = self.tmp_dir @@ -75,7 +60,6 @@ # a package, and a README self.write_file((self.tmp_dir, 'README'), 'xxx') self.write_file((self.tmp_dir, 'somecode', '__init__.py'), '#') - self.write_file((self.tmp_dir, 'setup.py'), SETUP_PY) os.chdir(self.tmp_dir) def tearDown(self): @@ -90,14 +74,13 @@ 'url': 'xxx', 'author': 'xxx', 'author_email': 'xxx'} dist = Distribution(metadata) - dist.script_name = 'setup.py' dist.packages = ['somecode'] dist.include_package_data = True cmd = sdist(dist) cmd.dist_dir = 'dist' return dist, cmd - @unittest.skipUnless(zlib, "requires zlib") + @requires_zlib def test_prune_file_list(self): # this test creates a package with some vcs dirs in it # and launch sdist to make sure they get pruned @@ -105,6 +88,7 @@ # creating VCS directories with some files in them os.mkdir(join(self.tmp_dir, 'somecode', '.svn')) + self.write_file((self.tmp_dir, 'somecode', '.svn', 'ok.py'), 'xxx') os.mkdir(join(self.tmp_dir, 'somecode', '.hg')) @@ -139,15 +123,12 @@ # making sure everything has been pruned correctly 
self.assertEqual(len(content), 2) - @unittest.skipUnless(zlib, "requires zlib") + @requires_zlib + @unittest.skipIf(find_executable('tar') is None or + find_executable('gzip') is None, + 'requires tar and gzip programs') def test_make_distribution(self): - - # check if tar and gzip are installed - if (find_executable('tar') is None or - find_executable('gzip') is None): - return - - # now building a sdist + # building a sdist dist, cmd = self.get_cmd() # creating a gztar then a tar @@ -157,10 +138,8 @@ # making sure we have two files dist_folder = join(self.tmp_dir, 'dist') - result = os.listdir(dist_folder) - result.sort() - self.assertEqual(result, - ['fake-1.0.tar', 'fake-1.0.tar.gz'] ) + result = sorted(os.listdir(dist_folder)) + self.assertEqual(result, ['fake-1.0.tar', 'fake-1.0.tar.gz']) os.remove(join(dist_folder, 'fake-1.0.tar')) os.remove(join(dist_folder, 'fake-1.0.tar.gz')) @@ -171,12 +150,10 @@ cmd.ensure_finalized() cmd.run() - result = os.listdir(dist_folder) - result.sort() - self.assertEqual(result, - ['fake-1.0.tar', 'fake-1.0.tar.gz']) + result = sorted(os.listdir(dist_folder)) + self.assertEqual(result, ['fake-1.0.tar', 'fake-1.0.tar.gz']) - @unittest.skipUnless(zlib, "requires zlib") + @requires_zlib def test_add_defaults(self): # http://bugs.python.org/issue2279 @@ -189,6 +166,7 @@ # in package_data dist.package_data = {'': ['*.cfg', '*.dat'], 'somecode': ['*.txt']} + self.write_file((self.tmp_dir, 'setup.cfg'), '#') self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#') self.write_file((self.tmp_dir, 'somecode', 'doc.dat'), '#') @@ -202,10 +180,10 @@ self.write_file((some_dir, 'file.txt'), '#') self.write_file((some_dir, 'other_file.txt'), '#') - dist.data_files = {'data/data.dt' : '{appdata}/data.dt', - 'inroot.txt' : '{appdata}/inroot.txt', - 'some/file.txt' : '{appdata}/file.txt', - 'some/other_file.txt' : '{appdata}/other_file.txt'} + dist.data_files = {'data/data.dt': '{appdata}/data.dt', + 'inroot.txt': '{appdata}/inroot.txt', + 
'some/file.txt': '{appdata}/file.txt', + 'some/other_file.txt': '{appdata}/other_file.txt'} # adding a script script_dir = join(self.tmp_dir, 'scripts') @@ -230,38 +208,40 @@ finally: zip_file.close() - # making sure everything was added - self.assertEqual(len(content), 9) + # Making sure everything was added. This includes 8 code and data + # files in addition to PKG-INFO and setup.cfg + self.assertEqual(len(content), 10) - # checking the MANIFEST - manifest = open(join(self.tmp_dir, 'MANIFEST')).read() + # Checking the MANIFEST + with open(join(self.tmp_dir, 'MANIFEST')) as fp: + manifest = fp.read() self.assertEqual(manifest, MANIFEST % {'sep': os.sep}) - @unittest.skipUnless(zlib, "requires zlib") + @requires_zlib def test_metadata_check_option(self): # testing the `check-metadata` option - dist, cmd = self.get_cmd(metadata={'name':'xxx', 'version':'xxx'}) + dist, cmd = self.get_cmd(metadata={'name': 'xxx', 'version': 'xxx'}) - # this should raise some warnings ! - # with the `check` subcommand + # this should raise some warnings + # with the check subcommand cmd.ensure_finalized() cmd.run() warnings = self.get_logs(logging.WARN) - self.assertEqual(len(warnings), 2) + self.assertEqual(len(warnings), 4) # trying with a complete set of metadata - self.clear_logs() + self.loghandler.flush() dist, cmd = self.get_cmd() cmd.ensure_finalized() - cmd.metadata_check = 0 + cmd.metadata_check = False cmd.run() warnings = self.get_logs(logging.WARN) # removing manifest generated warnings warnings = [warn for warn in warnings if not warn.endswith('-- skipping')] - # the remaining warning is about the use of the default file list - self.assertEqual(len(warnings), 1) - + # the remaining warnings are about the use of the default file list and + # the absence of setup.cfg + self.assertEqual(len(warnings), 2) def test_show_formats(self): __, stdout = captured_stdout(show_formats) @@ -284,24 +264,21 @@ # formats has to be a string splitable on (' ', ',') or # a stringlist 
cmd.formats = 1 - self.assertRaises(DistutilsOptionError, cmd.finalize_options) + self.assertRaises(PackagingOptionError, cmd.finalize_options) cmd.formats = ['zip'] cmd.finalize_options() # formats has to be known cmd.formats = 'supazipa' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) + self.assertRaises(PackagingOptionError, cmd.finalize_options) - @unittest.skipUnless(zlib, "requires zlib") + @requires_zlib @unittest.skipUnless(UID_GID_SUPPORT, "requires grp and pwd support") + @unittest.skipIf(find_executable('tar') is None or + find_executable('gzip') is None, + 'requires tar and gzip programs') def test_make_distribution_owner_group(self): - - # check if tar and gzip are installed - if (find_executable('tar') is None or - find_executable('gzip') is None): - return - - # now building a sdist + # building a sdist dist, cmd = self.get_cmd() # creating a gztar and specifying the owner+group @@ -332,71 +309,65 @@ # making sure we have the good rights archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') archive = tarfile.open(archive_name) - - # note that we are not testing the group ownership here - # because, depending on the platforms and the container - # rights (see #7408) try: + # note that we are not testing the group ownership here + # because, depending on the platforms and the container + # rights (see #7408) for member in archive.getmembers(): self.assertEqual(member.uid, os.getuid()) finally: archive.close() + @requires_zlib def test_get_file_list(self): # make sure MANIFEST is recalculated dist, cmd = self.get_cmd() - # filling data_files by pointing files in package_data dist.package_data = {'somecode': ['*.txt']} self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#') cmd.ensure_finalized() cmd.run() - f = open(cmd.manifest) - try: + # Should produce four lines. Those lines are one comment, one default + # (README) and two package files. 
+ with open(cmd.manifest) as f: manifest = [line.strip() for line in f.read().split('\n') if line.strip() != ''] - finally: - f.close() self.assertEqual(len(manifest), 3) - # adding a file + # Adding a file self.write_file((self.tmp_dir, 'somecode', 'doc2.txt'), '#') - # make sure build_py is reinitinialized, like a fresh run + # make sure build_py is reinitialized, like a fresh run build_py = dist.get_command_obj('build_py') build_py.finalized = False build_py.ensure_finalized() cmd.run() - f = open(cmd.manifest) - try: + with open(cmd.manifest) as f: manifest2 = [line.strip() for line in f.read().split('\n') if line.strip() != ''] - finally: - f.close() - # do we have the new file in MANIFEST ? + # Do we have the new file in MANIFEST? self.assertEqual(len(manifest2), 4) self.assertIn('doc2.txt', manifest2[-1]) + @requires_zlib def test_manifest_marker(self): # check that autogenerated MANIFESTs have a marker dist, cmd = self.get_cmd() cmd.ensure_finalized() cmd.run() - f = open(cmd.manifest) - try: + with open(cmd.manifest) as f: manifest = [line.strip() for line in f.read().split('\n') if line.strip() != ''] - finally: - f.close() self.assertEqual(manifest[0], - '# file GENERATED by distutils, do NOT edit') + '# file GENERATED by distutils2, do NOT edit') + @requires_zlib def test_manual_manifest(self): # check that a MANIFEST without a marker is left alone dist, cmd = self.get_cmd() @@ -404,29 +375,25 @@ self.write_file((self.tmp_dir, cmd.manifest), 'README.manual') cmd.run() - f = open(cmd.manifest) - try: + with open(cmd.manifest) as f: manifest = [line.strip() for line in f.read().split('\n') if line.strip() != ''] - finally: - f.close() self.assertEqual(manifest, ['README.manual']) + @requires_zlib def test_template(self): dist, cmd = self.get_cmd() dist.extra_files = ['include yeah'] cmd.ensure_finalized() self.write_file((self.tmp_dir, 'yeah'), 'xxx') cmd.run() - f = open(cmd.manifest) - try: + with open(cmd.manifest) as f: content = f.read() - finally: - 
f.close() self.assertIn('yeah', content) + @requires_zlib def test_manifest_builder(self): dist, cmd = self.get_cmd() cmd.manifest_builders = 'distutils2.tests.test_command_sdist.builder' diff --git a/distutils2/tests/test_command_test.py b/distutils2/tests/test_command_test.py --- a/distutils2/tests/test_command_test.py +++ b/distutils2/tests/test_command_test.py @@ -2,20 +2,19 @@ import re import sys import shutil +import logging import unittest as ut1 +import distutils2.database -from copy import copy from os.path import join from operator import getitem, setitem, delitem -from StringIO import StringIO - from distutils2.command.build import build from distutils2.tests import unittest -from distutils2.tests.support import TempdirManager, LoggingCatcher +from distutils2.tests.support import (TempdirManager, EnvironRestorer, + LoggingCatcher) from distutils2.command.test import test from distutils2.command import set_command from distutils2.dist import Distribution -from distutils2._backport import pkgutil EXPECTED_OUTPUT_RE = r'''FAIL: test_blah \(myowntestmodule.SomeTest\) @@ -28,32 +27,37 @@ here = os.path.dirname(os.path.abspath(__file__)) -_RECORD = [] class MockBuildCmd(build): build_lib = "mock build lib" command_name = 'build' plat_name = 'whatever' - def initialize_options(self): pass - def finalize_options(self): pass - def run(self): _RECORD.append("build run") + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + def run(self): + self._record.append("build has run") class TestTest(TempdirManager, + EnvironRestorer, LoggingCatcher, unittest.TestCase): + restore_environ = ['PYTHONPATH'] + def setUp(self): super(TestTest, self).setUp() - - distutils2path = os.path.dirname(os.path.dirname(here)) - self.old_pythonpath = os.environ.get('PYTHONPATH', '') - os.environ['PYTHONPATH'] = distutils2path + os.pathsep + self.old_pythonpath - - def tearDown(self): - pkgutil.clear_cache() - os.environ['PYTHONPATH'] = self.old_pythonpath - 
super(TestTest, self).tearDown() + self.addCleanup(distutils2.database.clear_cache) + new_pythonpath = os.path.dirname(os.path.dirname(here)) + pythonpath = os.environ.get('PYTHONPATH') + if pythonpath is not None: + new_pythonpath = os.pathsep.join((new_pythonpath, pythonpath)) + os.environ['PYTHONPATH'] = new_pythonpath def assert_re_match(self, pattern, string): def quote(s): @@ -71,7 +75,8 @@ shutil.copytree(pkg_dir, temp_pkg_dir) return temp_pkg_dir - def safely_replace(self, obj, attr, new_val=None, delete=False, dictionary=False): + def safely_replace(self, obj, attr, + new_val=None, delete=False, dictionary=False): """Replace a object's attribute returning to its original state at the end of the test run. Creates the attribute if not present before (deleting afterwards). When delete=True, makes sure the value is del'd @@ -79,10 +84,12 @@ rather than attributes.""" if dictionary: _setattr, _getattr, _delattr = setitem, getitem, delitem + def _hasattr(_dict, value): return value in _dict else: - _setattr, _getattr, _delattr, _hasattr = setattr, getattr, delattr, hasattr + _setattr, _getattr, _delattr, _hasattr = (setattr, getattr, + delattr, hasattr) orig_has_attr = _hasattr(obj, attr) if orig_has_attr: @@ -107,9 +114,12 @@ a_module.recorder = lambda *args: record.append("suite") class MockTextTestRunner(object): - def __init__(*_, **__): pass + def __init__(*_, **__): + pass + def run(_self, suite): record.append("run") + self.safely_replace(ut1, "TextTestRunner", MockTextTestRunner) dist = Distribution() @@ -119,17 +129,16 @@ self.assertEqual(record, ["suite", "run"]) def test_builds_before_running_tests(self): + self.addCleanup(set_command, 'distutils2.command.build.build') set_command('distutils2.tests.test_command_test.MockBuildCmd') - try: - dist = Distribution() - cmd = test(dist) - cmd.runner = self.prepare_named_function(lambda: None) - _RECORD[:] = [] - cmd.ensure_finalized() - cmd.run() - self.assertEqual(_RECORD, ['build run']) - finally: - 
set_command('distutils2.command.build.build') + + dist = Distribution() + dist.get_command_obj('build')._record = record = [] + cmd = test(dist) + cmd.runner = self.prepare_named_function(lambda: None) + cmd.ensure_finalized() + cmd.run() + self.assertEqual(['build has run'], record) def _test_works_with_2to3(self): pass @@ -139,16 +148,14 @@ cmd = test(dist) phony_project = 'ohno_ohno-impossible_1234-name_stop-that!' cmd.tests_require = [phony_project] - record = [] - cmd.announce = lambda *args: record.append(args) cmd.ensure_finalized() - self.assertEqual(1, len(record)) - self.assertIn(phony_project, record[0][0]) + logs = self.get_logs(logging.WARNING) + self.assertIn(phony_project, logs[-1]) def prepare_a_module(self): tmp_dir = self.mkdtemp() sys.path.append(tmp_dir) - self.addCleanup(lambda: sys.path.remove(tmp_dir)) + self.addCleanup(sys.path.remove, tmp_dir) self.write_file((tmp_dir, 'distutils2_tests_a.py'), '') import distutils2_tests_a as a_module @@ -163,21 +170,30 @@ dist = Distribution() cmd = test(dist) record = [] - cmd.runner = self.prepare_named_function(lambda: record.append("runner called")) + cmd.runner = self.prepare_named_function( + lambda: record.append("runner called")) cmd.ensure_finalized() cmd.run() self.assertEqual(["runner called"], record) def prepare_mock_ut2(self): class MockUTClass(object): - def __init__(*_, **__): pass - def discover(self): pass - def run(self, _): pass + def __init__(*_, **__): + pass + + def discover(self): + pass + + def run(self, _): + pass + class MockUTModule(object): TestLoader = MockUTClass TextTestRunner = MockUTClass + mock_ut2 = MockUTModule() - self.safely_replace(sys.modules, "unittest2", mock_ut2, dictionary=True) + self.safely_replace(sys.modules, "unittest2", + mock_ut2, dictionary=True) return mock_ut2 def test_gets_unittest_discovery(self): @@ -193,12 +209,13 @@ def test_calls_discover(self): self.safely_replace(ut1.TestLoader, "discover", delete=True) mock_ut2 = self.prepare_mock_ut2() - 
_RECORD[:] = [] - mock_ut2.TestLoader.discover = lambda self, path: _RECORD.append(path) + record = [] + mock_ut2.TestLoader.discover = lambda self, path: record.append(path) dist = Distribution() cmd = test(dist) cmd.run() - self.assertEqual(_RECORD, [os.curdir]) + self.assertEqual([os.curdir], record) + def test_suite(): return unittest.makeSuite(TestTest) diff --git a/distutils2/tests/test_command_upload.py b/distutils2/tests/test_command_upload.py --- a/distutils2/tests/test_command_upload.py +++ b/distutils2/tests/test_command_upload.py @@ -1,14 +1,18 @@ -# -*- encoding: utf-8 -*- -"""Tests for distutils.command.upload.""" +"""Tests for distutils2.command.upload.""" import os import sys from distutils2.command.upload import upload from distutils2.dist import Distribution -from distutils2.errors import DistutilsOptionError +from distutils2.errors import PackagingOptionError from distutils2.tests import unittest, support -from distutils2.tests.pypi_server import PyPIServer, PyPIServerTestCase +try: + import threading + from distutils2.tests.pypi_server import PyPIServerTestCase +except ImportError: + threading = None + PyPIServerTestCase = unittest.TestCase PYPIRC_NOPASSWORD = """\ @@ -40,9 +44,12 @@ """ -class UploadTestCase(support.TempdirManager, support.EnvironGuard, + at unittest.skipIf(threading is None, 'needs threading') +class UploadTestCase(support.TempdirManager, support.EnvironRestorer, support.LoggingCatcher, PyPIServerTestCase): + restore_environ = ['HOME'] + def setUp(self): super(UploadTestCase, self).setUp() self.tmp_dir = self.mkdtemp() @@ -60,13 +67,13 @@ ('repository', 'http://pypi.python.org/pypi')): self.assertEqual(getattr(cmd, attr), expected) - def test_finalize_options_unsigned_identity_yields_exception(self): + def test_finalize_options_unsigned_identity_raises_exception(self): self.write_file(self.rc, PYPIRC) dist = Distribution() cmd = upload(dist) cmd.identity = True cmd.sign = False - self.assertRaises(DistutilsOptionError, 
cmd.finalize_options) + self.assertRaises(PackagingOptionError, cmd.finalize_options) def test_saved_password(self): # file with no password @@ -85,19 +92,19 @@ cmd.finalize_options() self.assertEqual(cmd.password, 'xxx') - def test_upload_without_files_yields_exception(self): + def test_upload_without_files_raises_exception(self): dist = Distribution() cmd = upload(dist) - self.assertRaises(DistutilsOptionError, cmd.run) + self.assertRaises(PackagingOptionError, cmd.run) def test_upload(self): path = os.path.join(self.tmp_dir, 'xxx') self.write_file(path) - command, pyversion, filename = 'xxx', '2.6', path + command, pyversion, filename = 'xxx', '3.3', path dist_files = [(command, pyversion, filename)] # lets run it - pkg_dir, dist = self.create_dist(dist_files=dist_files, author=u'd??d??') + pkg_dir, dist = self.create_dist(dist_files=dist_files, author='d\xc3d\xc3') cmd = upload(dist) cmd.ensure_finalized() cmd.repository = self.pypi.full_address @@ -105,11 +112,12 @@ # what did we send ? 
handler, request_data = self.pypi.requests[-1] - headers = handler.headers.dict - self.assertIn('d??d??', request_data) - self.assertIn('xxx', request_data) + headers = handler.headers + #self.assertIn('d\xc3d\xc3', str(request_data)) + self.assertIn(b'xxx', request_data) + self.assertEqual(int(headers['content-length']), len(request_data)) - self.assertTrue(int(headers['content-length']) < 2000) + self.assertLess(int(headers['content-length']), 2500) self.assertTrue(headers['content-type'].startswith('multipart/form-data')) self.assertEqual(handler.command, 'POST') self.assertNotIn('\n', headers['authorization']) @@ -117,7 +125,7 @@ def test_upload_docs(self): path = os.path.join(self.tmp_dir, 'xxx') self.write_file(path) - command, pyversion, filename = 'xxx', '2.6', path + command, pyversion, filename = 'xxx', '3.3', path dist_files = [(command, pyversion, filename)] docs_path = os.path.join(self.tmp_dir, "build", "docs") os.makedirs(docs_path) @@ -125,26 +133,28 @@ self.write_file(self.rc, PYPIRC) # lets run it - pkg_dir, dist = self.create_dist(dist_files=dist_files, author=u'd??d??') + pkg_dir, dist = self.create_dist(dist_files=dist_files, author='d\xc3d\xc3') cmd = upload(dist) cmd.get_finalized_command("build").run() cmd.upload_docs = True cmd.ensure_finalized() cmd.repository = self.pypi.full_address + prev_dir = os.getcwd() try: - prev_dir = os.getcwd() os.chdir(self.tmp_dir) cmd.run() finally: os.chdir(prev_dir) handler, request_data = self.pypi.requests[-1] - action, name, content =\ - request_data.split("----------------GHSKFJDLGDS7543FJKLFHRE75642756743254")[1:4] + action, name, content = request_data.split( + "----------------GHSKFJDLGDS7543FJKLFHRE75642756743254" + .encode())[1:4] - self.assertIn('name=":action"', action) - self.assertIn("doc_upload", action) + self.assertIn(b'name=":action"', action) + self.assertIn(b'doc_upload', action) + def test_suite(): return unittest.makeSuite(UploadTestCase) diff --git 
a/distutils2/tests/test_command_upload_docs.py b/distutils2/tests/test_command_upload_docs.py --- a/distutils2/tests/test_command_upload_docs.py +++ b/distutils2/tests/test_command_upload_docs.py @@ -1,46 +1,27 @@ -# -*- encoding: utf-8 -*- -"""Tests for distutils.command.upload_docs.""" +"""Tests for distutils2.command.upload_docs.""" import os import sys -import httplib import shutil import zipfile try: - from cStringIO import StringIO + import _ssl except ImportError: - from StringIO import StringIO + _ssl = None from distutils2.command import upload_docs as upload_docs_mod -from distutils2.command.upload_docs import (upload_docs, zip_dir, - encode_multipart) +from distutils2.command.upload_docs import upload_docs, zip_dir from distutils2.dist import Distribution -from distutils2.errors import DistutilsFileError, DistutilsOptionError +from distutils2.errors import PackagingFileError, PackagingOptionError from distutils2.tests import unittest, support -from distutils2.tests.pypi_server import PyPIServer, PyPIServerTestCase +try: + import threading + from distutils2.tests.pypi_server import PyPIServerTestCase +except ImportError: + threading = None + PyPIServerTestCase = object -EXPECTED_MULTIPART_OUTPUT = "\r\n".join([ -'---x', -'Content-Disposition: form-data; name="a"', -'', -'b', -'---x', -'Content-Disposition: form-data; name="c"', -'', -'d', -'---x', -'Content-Disposition: form-data; name="e"; filename="f"', -'', -'g', -'---x', -'Content-Disposition: form-data; name="h"; filename="i"', -'', -'j', -'---x--', -'', -]) - PYPIRC = """\ [distutils] index-servers = server1 @@ -51,8 +32,14 @@ password = long_island """ -class UploadDocsTestCase(support.TempdirManager, support.EnvironGuard, - support.LoggingCatcher, PyPIServerTestCase): + + at unittest.skipIf(threading is None, "Needs threading") +class UploadDocsTestCase(support.TempdirManager, + support.EnvironRestorer, + support.LoggingCatcher, + PyPIServerTestCase): + + restore_environ = ['HOME'] def 
setUp(self): super(UploadDocsTestCase, self).setUp() @@ -103,13 +90,6 @@ zip_f = zipfile.ZipFile(compressed) self.assertEqual(zip_f.namelist(), ['index.html', 'docs/index.html']) - def test_encode_multipart(self): - fields = [("a", "b"), ("c", "d")] - files = [("e", "f", "g"), ("h", "i", "j")] - content_type, body = encode_multipart(fields, files, "-x") - self.assertEqual(content_type, "multipart/form-data; boundary=-x") - self.assertEqual(body, EXPECTED_MULTIPART_OUTPUT) - def prepare_command(self): self.cmd.upload_dir = self.prepare_sample_dir() self.cmd.ensure_finalized() @@ -123,61 +103,60 @@ self.assertEqual(len(self.pypi.requests), 1) handler, request_data = self.pypi.requests[-1] - self.assertIn("content", request_data) - self.assertIn("Basic", handler.headers.dict['authorization']) - self.assertTrue(handler.headers.dict['content-type'] + self.assertIn(b"content", request_data) + self.assertIn("Basic", handler.headers['authorization']) + self.assertTrue(handler.headers['content-type'] .startswith('multipart/form-data;')) action, name, version, content =\ - request_data.split("----------------GHSKFJDLGDS7543FJKLFHRE75642756743254")[1:5] + request_data.split("----------------GHSKFJDLGDS7543FJKLFHRE75642756743254".encode())[1:5] + # check that we picked the right chunks - self.assertIn('name=":action"', action) - self.assertIn('name="name"', name) - self.assertIn('name="version"', version) - self.assertIn('name="content"', content) + self.assertIn(b'name=":action"', action) + self.assertIn(b'name="name"', name) + self.assertIn(b'name="version"', version) + self.assertIn(b'name="content"', content) # check their contents - self.assertIn("doc_upload", action) - self.assertIn("distr-name", name) - self.assertIn("docs/index.html", content) - self.assertIn("Ce mortel ennui", content) + self.assertIn(b'doc_upload', action) + self.assertIn(b'distr-name', name) + self.assertIn(b'docs/index.html', content) + self.assertIn(b'Ce mortel ennui', content) + 
@unittest.skipIf(_ssl is None, 'Needs SSL support') def test_https_connection(self): - https_called = False + self.https_called = False + orig_https = upload_docs_mod.httplib.HTTPSConnection + def https_conn_wrapper(*args): - https_called = True - return upload_docs_mod.httplib.HTTPConnection(*args) # the testing server is http + self.https_called = True + # the testing server is http + return upload_docs_mod.httplib.HTTPConnection(*args) + upload_docs_mod.httplib.HTTPSConnection = https_conn_wrapper try: self.prepare_command() self.cmd.run() - self.assertFalse(https_called) + self.assertFalse(self.https_called) self.cmd.repository = self.cmd.repository.replace("http", "https") self.cmd.run() - self.assertFalse(https_called) + self.assertTrue(self.https_called) finally: upload_docs_mod.httplib.HTTPSConnection = orig_https def test_handling_response(self): - calls = [] - def aggr(*args): - calls.append(args) self.pypi.default_response_status = '403 Forbidden' self.prepare_command() - self.cmd.announce = aggr self.cmd.run() - message, _ = calls[-1] - self.assertIn('Upload failed (403): Forbidden', message) + self.assertIn('Upload failed (403): Forbidden', self.get_logs()[-1]) - calls = [] self.pypi.default_response_status = '301 Moved Permanently' self.pypi.default_response_headers.append(("Location", "brand_new_location")) self.cmd.run() - message = calls[-1][0] - self.assertIn('brand_new_location', message) + self.assertIn('brand_new_location', self.get_logs()[-1]) def test_reads_pypirc_data(self): self.write_file(self.rc, PYPIRC % self.pypi.full_address) @@ -190,19 +169,18 @@ def test_checks_index_html_presence(self): self.cmd.upload_dir = self.prepare_sample_dir() os.remove(os.path.join(self.cmd.upload_dir, "index.html")) - self.assertRaises(DistutilsFileError, self.cmd.ensure_finalized) + self.assertRaises(PackagingFileError, self.cmd.ensure_finalized) def test_checks_upload_dir(self): self.cmd.upload_dir = self.prepare_sample_dir() 
shutil.rmtree(os.path.join(self.cmd.upload_dir)) - self.assertRaises(DistutilsOptionError, self.cmd.ensure_finalized) + self.assertRaises(PackagingOptionError, self.cmd.ensure_finalized) def test_show_response(self): self.prepare_command() self.cmd.show_response = True self.cmd.run() - record = self.logs[-1][1] - + record = self.get_logs()[-1] self.assertTrue(record, "should report the response") self.assertIn(self.pypi.default_response_data, record) diff --git a/distutils2/tests/test_compiler.py b/distutils2/tests/test_compiler.py --- a/distutils2/tests/test_compiler.py +++ b/distutils2/tests/test_compiler.py @@ -2,7 +2,7 @@ import os from distutils2.compiler import (get_default_compiler, customize_compiler, - gen_lib_options) + gen_lib_options) from distutils2.tests import unittest, support @@ -17,26 +17,26 @@ def runtime_library_dir_option(self, dir): return ["-cool", "-R" + dir] - def find_library_file(self, dirs, lib, debug=0): + def find_library_file(self, dirs, lib, debug=False): return 'found' def library_option(self, lib): return "-l" + lib -class CompilerTestCase(support.EnvironGuard, unittest.TestCase): +class CompilerTestCase(support.EnvironRestorer, unittest.TestCase): + restore_environ = ['AR', 'ARFLAGS'] + + @unittest.skipUnless(get_default_compiler() == 'unix', + 'irrelevant if default compiler is not unix') def test_customize_compiler(self): - # not testing if default compiler is not unix - if get_default_compiler() != 'unix': - return - os.environ['AR'] = 'my_ar' os.environ['ARFLAGS'] = '-arflags' # make sure AR gets caught - class compiler: + class compiler(object): name = 'unix' def set_executables(self, **kw): diff --git a/distutils2/tests/test_config.py b/distutils2/tests/test_config.py --- a/distutils2/tests/test_config.py +++ b/distutils2/tests/test_config.py @@ -1,25 +1,30 @@ -# -*- encoding: utf-8 -*- -"""Tests for distutils.config.""" +"""Tests for distutils2.config.""" import os import sys +import logging from StringIO import StringIO 
-from distutils2.tests import unittest, support, run_unittest +from distutils2 import command +from distutils2.dist import Distribution +from distutils2.errors import PackagingFileError +from distutils2.compiler import new_compiler, _COMPILERS from distutils2.command.sdist import sdist -from distutils2.errors import DistutilsFileError +from distutils2.tests import unittest, support +from distutils2.tests.support import requires_zlib -SETUP_CFG = """ + +SETUP_CFG = u""" [metadata] name = RestingParrot version = 0.6.4 author = Carl Meyer author_email = carl at oddbird.net -maintainer = ??ric Araujo +maintainer = \xc3ric Araujo maintainer_email = merwok at netwok.org -summary = A sample project demonstrating distutils2 packaging +summary = A sample project demonstrating distutils2 description-file = %(description-file)s -keywords = distutils2, packaging, sample project +keywords = distutils2, sample project classifier = Development Status :: 4 - Beta @@ -85,7 +90,7 @@ compilers = distutils2.tests.test_config.DCompiler -setup_hook = distutils2.tests.test_config.hook +setup_hooks = %(setup-hooks)s @@ -109,7 +114,7 @@ GecodeInt GecodeKernel -- sys.platform == 'win32' [extension=fast_taunt] -name = three.fast_taunt +name = two.fast_taunt sources = cxx_src/utils_taunt.cxx cxx_src/python_module.cxx include_dirs = /usr/include/gecode @@ -121,6 +126,15 @@ """ +HOOKS_MODULE = """ +import logging + +logger = logging.getLogger('distutils2') + +def logging_hook(config): + logger.warning('logging_hook called') +""" + class DCompiler(object): name = 'd' @@ -129,8 +143,17 @@ def __init__(self, *args): pass -def hook(content): - content['metadata']['version'] += '.dev1' + +def version_hook(config): + config['metadata']['version'] += '.dev1' + + +def first_hook(config): + config['files']['modules'] += '\n first' + + +def third_hook(config): + config['files']['modules'] += '\n third' class FooBarBazTest(object): @@ -139,11 +162,11 @@ self.distribution = dist @classmethod - def 
get_command_name(self): + def get_command_name(cls): return 'foo' def run(self): - self.distribution.foo_was_here = 1 + self.distribution.foo_was_here = True def nothing(self): pass @@ -155,57 +178,53 @@ class ConfigTestCase(support.TempdirManager, + support.EnvironRestorer, support.LoggingCatcher, unittest.TestCase): + restore_environ = ['PLAT'] + def setUp(self): super(ConfigTestCase, self).setUp() self.addCleanup(setattr, sys, 'stdout', sys.stdout) self.addCleanup(setattr, sys, 'stderr', sys.stderr) - #sys.stdout = sys.stderr = StringIO() + sys.stdout = StringIO() + sys.stderr = StringIO() self.addCleanup(os.chdir, os.getcwd()) tempdir = self.mkdtemp() + self.working_dir = os.getcwd() os.chdir(tempdir) self.tempdir = tempdir - self.addCleanup(setattr, sys, 'argv', sys.argv) + def tearDown(self): + os.chdir(self.working_dir) + super(ConfigTestCase, self).tearDown() def write_setup(self, kwargs=None): - opts = {'description-file': 'README', 'extra-files':''} + opts = {'description-file': 'README', 'extra-files': '', + 'setup-hooks': 'distutils2.tests.test_config.version_hook'} if kwargs: opts.update(kwargs) - self.write_file('setup.cfg', SETUP_CFG % opts) + self.write_file('setup.cfg', SETUP_CFG % opts, encoding='utf-8') - - def run_setup(self, *args): - # run setup with args - args = ['run'] + list(args) - from distutils2.run import main - dist = main(args) - return dist - - def _get_metadata(self, name='version'): - from distutils2.dist import Distribution + def get_dist(self): dist = Distribution() dist.parse_config_files() - return dist, dist.metadata[name] + return dist def test_config(self): self.write_setup() self.write_file('README', 'yeah') os.mkdir('bm') - self.write_file(os.path.join('bm', 'b1.gif'), '') - self.write_file(os.path.join('bm', 'b2.gif'), '') + self.write_file(('bm', 'b1.gif'), '') + self.write_file(('bm', 'b2.gif'), '') os.mkdir('Cfg') - self.write_file(os.path.join('Cfg', 'data.CFG'), '') + self.write_file(('Cfg', 'data.CFG'), '') 
self.write_file('init_script', '') # try to load the metadata now - dist, version = self._get_metadata() - - # sanity check - self.assertEqual(version, '0.6.4.dev1') + dist = self.get_dist() # check what was done self.assertEqual(dist.metadata['Author'], 'Carl Meyer') @@ -214,16 +233,17 @@ # the hook adds .dev1 self.assertEqual(dist.metadata['Version'], '0.6.4.dev1') - wanted = ['Development Status :: 4 - Beta', - 'Environment :: Console (Text Based)', - "Environment :: X11 Applications :: GTK; python_version < '3'", - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 3'] + wanted = [ + 'Development Status :: 4 - Beta', + 'Environment :: Console (Text Based)', + "Environment :: X11 Applications :: GTK; python_version < '3'", + 'License :: OSI Approved :: MIT License', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 3'] self.assertEqual(dist.metadata['Classifier'], wanted) - wanted = ['distutils2', 'packaging', 'sample project'] + wanted = ['distutils2', 'sample project'] self.assertEqual(dist.metadata['Keywords'], wanted) self.assertEqual(dist.metadata['Requires-Python'], '>=2.4, <3.2') @@ -245,10 +265,10 @@ self.assertEqual(dist.py_modules, ['haven']) self.assertEqual(dist.package_data, {'cheese': 'data/templates/*'}) self.assertEqual( - {'bm/b1.gif' : '{icon}/b1.gif', - 'bm/b2.gif' : '{icon}/b2.gif', - 'Cfg/data.CFG' : '{config}/baBar/data.CFG', - 'init_script' : '{script}/JunGle/init_script'}, + {'bm/b1.gif': '{icon}/b1.gif', + 'bm/b2.gif': '{icon}/b2.gif', + 'Cfg/data.CFG': '{config}/baBar/data.CFG', + 'init_script': '{script}/JunGle/init_script'}, dist.data_files) self.assertEqual(dist.package_dir, 'src') @@ -260,37 +280,36 @@ # this file would be __main__.Foo when run as "python test_config.py". # The name FooBarBazTest should be unique enough to prevent # collisions. 
- self.assertEqual(dist.get_command_obj('foo').__class__.__name__, - 'FooBarBazTest') + self.assertEqual('FooBarBazTest', + dist.get_command_obj('foo').__class__.__name__) # did the README got loaded ? self.assertEqual(dist.metadata['description'], 'yeah') # do we have the D Compiler enabled ? - from distutils2.compiler import new_compiler, _COMPILERS self.assertIn('d', _COMPILERS) d = new_compiler(compiler='d') self.assertEqual(d.description, 'D Compiler') - def test_multiple_description_file(self): self.write_setup({'description-file': 'README CHANGES'}) self.write_file('README', 'yeah') self.write_file('CHANGES', 'changelog2') - dist, version = self._get_metadata() + dist = self.get_dist() self.assertEqual(dist.metadata.requires_files, ['README', 'CHANGES']) def test_multiline_description_file(self): self.write_setup({'description-file': 'README\n CHANGES'}) self.write_file('README', 'yeah') self.write_file('CHANGES', 'changelog') - dist, desc = self._get_metadata('description') - self.assertEqual(desc, 'yeah\nchangelog') + dist = self.get_dist() + self.assertEqual(dist.metadata['description'], 'yeah\nchangelog') self.assertEqual(dist.metadata.requires_files, ['README', 'CHANGES']) def test_parse_extensions_in_config(self): self.write_file('setup.cfg', EXT_SETUP_CFG) - dist, version = self._get_metadata() + dist = self.get_dist() + ext_modules = dict((mod.name, mod) for mod in dist.ext_modules) self.assertEqual(len(ext_modules), 2) ext = ext_modules.get('one.speed_coconuts') @@ -303,105 +322,152 @@ self.assertEqual(ext.extra_link_args, ['`gcc -print-file-name=libgcc.a`', '-shared']) - ext = ext_modules.get('three.fast_taunt') + ext = ext_modules.get('two.fast_taunt') self.assertEqual(ext.sources, ['cxx_src/utils_taunt.cxx', 'cxx_src/python_module.cxx']) self.assertEqual(ext.include_dirs, ['/usr/include/gecode', '/usr/include/blitz']) cargs = ['-fPIC', '-O2'] if sys.platform == 'win32': - cargs.append("/DGECODE_VERSION='win32'") + 
cargs.append("/DGECODE_VERSION=win32") else: cargs.append('-DGECODE_VERSION=$(./gecode_version)') self.assertEqual(ext.extra_compile_args, cargs) self.assertEqual(ext.language, 'cxx') + def test_project_setup_hook_works(self): + # Bug #11637: ensure the project directory is on sys.path to allow + # project-specific hooks + self.write_setup({'setup-hooks': 'hooks.logging_hook'}) + self.write_file('README', 'yeah') + self.write_file('hooks.py', HOOKS_MODULE) + self.get_dist() + logs = self.get_logs(logging.WARNING) + self.assertEqual(['logging_hook called'], logs) + self.assertIn('hooks', sys.modules) + + def test_missing_setup_hook_warns(self): + self.write_setup({'setup-hooks': 'this.does._not.exist'}) + self.write_file('README', 'yeah') + self.get_dist() + logs = self.get_logs(logging.WARNING) + self.assertEqual(1, len(logs)) + self.assertIn('cannot find setup hook', logs[0]) + + def test_multiple_setup_hooks(self): + self.write_setup({ + 'setup-hooks': '\n distutils2.tests.test_config.first_hook' + '\n distutils2.tests.test_config.missing_hook' + '\n distutils2.tests.test_config.third_hook' + }) + self.write_file('README', 'yeah') + dist = self.get_dist() + + self.assertEqual(['haven', 'first', 'third'], dist.py_modules) + logs = self.get_logs(logging.WARNING) + self.assertEqual(1, len(logs)) + self.assertIn('cannot find setup hook', logs[0]) def test_metadata_requires_description_files_missing(self): - self.write_setup({'description-file': 'README\n README2'}) + self.write_setup({'description-file': 'README README2'}) self.write_file('README', 'yeah') self.write_file('README2', 'yeah') - self.write_file('haven.py', '#') + os.mkdir('src') + self.write_file(('src', 'haven.py'), '#') self.write_file('script1.py', '#') os.mkdir('scripts') - self.write_file(os.path.join('scripts', 'find-coconuts'), '#') + self.write_file(('scripts', 'find-coconuts'), '#') os.mkdir('bin') - self.write_file(os.path.join('bin', 'taunt'), '#') + self.write_file(('bin', 'taunt'), '#') - 
os.mkdir('src') for pkg in ('one', 'two', 'three'): pkg = os.path.join('src', pkg) os.mkdir(pkg) - self.write_file(os.path.join(pkg, '__init__.py'), '#') + self.write_file((pkg, '__init__.py'), '#') - dist, version = self._get_metadata() + dist = self.get_dist() cmd = sdist(dist) cmd.finalize_options() cmd.get_file_list() - self.assertRaises(DistutilsFileError, cmd.make_distribution) + self.assertRaises(PackagingFileError, cmd.make_distribution) + @requires_zlib def test_metadata_requires_description_files(self): + # Create the following file structure: + # README + # README2 + # script1.py + # scripts/ + # find-coconuts + # bin/ + # taunt + # src/ + # haven.py + # one/__init__.py + # two/__init__.py + # three/__init__.py + self.write_setup({'description-file': 'README\n README2', - 'extra-files':'\n README2'}) - self.write_file('README', 'yeah') - self.write_file('README2', 'yeah') - self.write_file('haven.py', '#') + 'extra-files': '\n README3'}) + self.write_file('README', 'yeah 1') + self.write_file('README2', 'yeah 2') + self.write_file('README3', 'yeah 3') + os.mkdir('src') + self.write_file(('src', 'haven.py'), '#') self.write_file('script1.py', '#') os.mkdir('scripts') - self.write_file(os.path.join('scripts', 'find-coconuts'), '#') + self.write_file(('scripts', 'find-coconuts'), '#') os.mkdir('bin') - self.write_file(os.path.join('bin', 'taunt'), '#') + self.write_file(('bin', 'taunt'), '#') - os.mkdir('src') for pkg in ('one', 'two', 'three'): pkg = os.path.join('src', pkg) os.mkdir(pkg) - self.write_file(os.path.join(pkg, '__init__.py'), '#') + self.write_file((pkg, '__init__.py'), '#') - dist, desc = self._get_metadata('description') - self.assertIn('yeah\nyeah', desc) + dist = self.get_dist() + self.assertIn('yeah 1\nyeah 2', dist.metadata['description']) cmd = sdist(dist) cmd.finalize_options() cmd.get_file_list() - self.assertRaises(DistutilsFileError, cmd.make_distribution) + self.assertRaises(PackagingFileError, cmd.make_distribution) 
self.write_setup({'description-file': 'README\n README2', 'extra-files': '\n README2\n README'}) - dist, desc = self._get_metadata('description') - + dist = self.get_dist() cmd = sdist(dist) cmd.finalize_options() cmd.get_file_list() cmd.make_distribution() - self.assertIn('README\nREADME2\n', open('MANIFEST').read()) + with open('MANIFEST') as fp: + self.assertIn('README\nREADME2\n', fp.read()) def test_sub_commands(self): self.write_setup() self.write_file('README', 'yeah') - self.write_file('haven.py', '#') + os.mkdir('src') + self.write_file(('src', 'haven.py'), '#') self.write_file('script1.py', '#') os.mkdir('scripts') - self.write_file(os.path.join('scripts', 'find-coconuts'), '#') + self.write_file(('scripts', 'find-coconuts'), '#') os.mkdir('bin') - self.write_file(os.path.join('bin', 'taunt'), '#') - os.mkdir('src') + self.write_file(('bin', 'taunt'), '#') for pkg in ('one', 'two', 'three'): pkg = os.path.join('src', pkg) os.mkdir(pkg) - self.write_file(os.path.join(pkg, '__init__.py'), '#') + self.write_file((pkg, '__init__.py'), '#') # try to run the install command to see if foo is called - from distutils2.dist import Distribution - dist = Distribution() - dist.parse_config_files() - dist.run_command('install_dist') - self.assertEqual(dist.foo_was_here, 1) + dist = self.get_dist() + self.assertIn('foo', command.get_command_names()) + self.assertEqual('FooBarBazTest', + dist.get_command_obj('foo').__class__.__name__) def test_suite(): return unittest.makeSuite(ConfigTestCase) if __name__ == '__main__': - run_unittest(test_suite()) + unittest.main(defaultTest='test_suite') diff --git a/distutils2/tests/test_create.py b/distutils2/tests/test_create.py new file mode 100644 --- /dev/null +++ b/distutils2/tests/test_create.py @@ -0,0 +1,243 @@ +"""Tests for distutils2.create.""" +from StringIO import StringIO +import os +import sys +import sysconfig +from textwrap import dedent +from distutils2.create import MainProgram, ask_yn, ask, main + +from 
distutils2.tests import support, unittest + + +class CreateTestCase(support.TempdirManager, + support.EnvironRestorer, + unittest.TestCase): + + maxDiff = None + restore_environ = ['PLAT'] + + def setUp(self): + super(CreateTestCase, self).setUp() + self._stdin = sys.stdin # TODO use Inputs + self._stdout = sys.stdout + sys.stdin = StringIO() + sys.stdout = StringIO() + self._cwd = os.getcwd() + self.wdir = self.mkdtemp() + os.chdir(self.wdir) + # patch sysconfig + self._old_get_paths = sysconfig.get_paths + sysconfig.get_paths = lambda *args, **kwargs: { + 'man': sys.prefix + '/share/man', + 'doc': sys.prefix + '/share/doc/pyxfoil', } + + def tearDown(self): + sys.stdin = self._stdin + sys.stdout = self._stdout + os.chdir(self._cwd) + sysconfig.get_paths = self._old_get_paths + super(CreateTestCase, self).tearDown() + + def test_ask_yn(self): + sys.stdin.write(u'y\n') + sys.stdin.seek(0) + self.assertEqual('y', ask_yn('is this a test')) + + def test_ask(self): + sys.stdin.write(u'a\n') + sys.stdin.write(u'b\n') + sys.stdin.seek(0) + self.assertEqual('a', ask('is this a test')) + self.assertEqual('b', ask(str(list(range(0, 70))), default='c', + lengthy=True)) + + def test_set_multi(self): + mainprogram = MainProgram() + sys.stdin.write(u'aaaaa\n') + sys.stdin.seek(0) + mainprogram.data['author'] = [] + mainprogram._set_multi('_set_multi test', 'author') + self.assertEqual(['aaaaa'], mainprogram.data['author']) + + def test_find_files(self): + # making sure we scan a project dir correctly + mainprogram = MainProgram() + + # building the structure + tempdir = self.wdir + dirs = ['pkg1', 'data', 'pkg2', 'pkg2/sub'] + files = [ + 'README', + 'data/data1', + 'foo.py', + 'pkg1/__init__.py', + 'pkg1/bar.py', + 'pkg2/__init__.py', + 'pkg2/sub/__init__.py', + ] + + for dir_ in dirs: + os.mkdir(os.path.join(tempdir, dir_)) + + for file_ in files: + path = os.path.join(tempdir, file_) + self.write_file(path, 'xxx') + + mainprogram._find_files() + 
mainprogram.data['packages'].sort() + + # do we have what we want? + self.assertEqual(mainprogram.data['packages'], + ['pkg1', 'pkg2', 'pkg2.sub']) + self.assertEqual(mainprogram.data['modules'], ['foo']) + data_fn = os.path.join('data', 'data1') + self.assertEqual(mainprogram.data['extra_files'], + ['README', data_fn]) + + def test_convert_setup_py_to_cfg(self): + self.write_file((self.wdir, 'setup.py'), + dedent(u""" + # coding: utf-8 + from distutils.core import setup + + long_description = '''My super Death-scription + barbar is now on the public domain, + ho, baby !''' + + setup(name='pyxfoil', + version='0.2', + description='Python bindings for the Xfoil engine', + long_description=long_description, + maintainer='Andr\xc3 Espaze', + maintainer_email='andre.espaze at logilab.fr', + url='http://www.python-science.org/project/pyxfoil', + license='GPLv2', + packages=['pyxfoil', 'babar', 'me'], + data_files=[ + ('share/doc/pyxfoil', ['README.rst']), + ('share/man', ['pyxfoil.1']), + ], + py_modules=['my_lib', 'mymodule'], + package_dir={ + 'babar': '', + 'me': 'Martinique/Lamentin', + }, + package_data={ + 'babar': ['Pom', 'Flora', 'Alexander'], + 'me': ['dady', 'mumy', 'sys', 'bro'], + '': ['setup.py', 'README'], + 'pyxfoil': ['fengine.so'], + }, + scripts=['my_script', 'bin/run'], + ) + """), encoding='utf-8') + sys.stdin.write(u'y\n') + sys.stdin.seek(0) + main() + + with open(os.path.join(self.wdir, 'setup.cfg'), encoding='utf-8') as fp: + contents = fp.read() + + self.assertEqual(contents, dedent(u"""\ + [metadata] + name = pyxfoil + version = 0.2 + summary = Python bindings for the Xfoil engine + download_url = UNKNOWN + home_page = http://www.python-science.org/project/pyxfoil + maintainer = Andr\xc3 Espaze + maintainer_email = andre.espaze at logilab.fr + description = My super Death-scription + |barbar is now on the public domain, + |ho, baby ! 
+ + [files] + packages = pyxfoil + babar + me + modules = my_lib + mymodule + scripts = my_script + bin/run + extra_files = Martinique/Lamentin/dady + Martinique/Lamentin/mumy + Martinique/Lamentin/sys + Martinique/Lamentin/bro + setup.py + README + Pom + Flora + Alexander + pyxfoil/fengine.so + + resources = + README.rst = {doc} + pyxfoil.1 = {man} + + """)) + + def test_convert_setup_py_to_cfg_with_description_in_readme(self): + self.write_file((self.wdir, 'setup.py'), + dedent(u""" + # coding: utf-8 + from distutils.core import setup + with open('README.txt') as fp: + long_description = fp.read() + + setup(name='pyxfoil', + version='0.2', + description='Python bindings for the Xfoil engine', + long_description=long_description, + maintainer='Andr\xc3 Espaze', + maintainer_email='andre.espaze at logilab.fr', + url='http://www.python-science.org/project/pyxfoil', + license='GPLv2', + packages=['pyxfoil'], + package_data={'pyxfoil': ['fengine.so', 'babar.so']}, + data_files=[ + ('share/doc/pyxfoil', ['README.rst']), + ('share/man', ['pyxfoil.1']), + ], + ) + """), encoding='utf-8') + self.write_file((self.wdir, 'README.txt'), + dedent(''' +My super Death-scription +barbar is now in the public domain, +ho, baby! + ''')) + sys.stdin.write(u'y\n') + sys.stdin.seek(0) + # FIXME Out of memory error. 
+ main() + with open(os.path.join(self.wdir, 'setup.cfg'), encoding='utf-8') as fp: + contents = fp.read() + + self.assertEqual(contents, dedent(u"""\ + [metadata] + name = pyxfoil + version = 0.2 + summary = Python bindings for the Xfoil engine + download_url = UNKNOWN + home_page = http://www.python-science.org/project/pyxfoil + maintainer = Andr\xc3 Espaze + maintainer_email = andre.espaze at logilab.fr + description-file = README.txt + + [files] + packages = pyxfoil + extra_files = pyxfoil/fengine.so + pyxfoil/babar.so + + resources = + README.rst = {doc} + pyxfoil.1 = {man} + + """)) + + +def test_suite(): + return unittest.makeSuite(CreateTestCase) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/distutils2/tests/test_cygwinccompiler.py b/distutils2/tests/test_cygwinccompiler.py --- a/distutils2/tests/test_cygwinccompiler.py +++ b/distutils2/tests/test_cygwinccompiler.py @@ -1,18 +1,13 @@ -"""Tests for distutils.cygwinccompiler.""" +"""Tests for distutils2.cygwinccompiler.""" +import os import sys -import os +import sysconfig +from distutils2.compiler.cygwinccompiler import ( + check_config_h, get_msvcr, + CONFIG_H_OK, CONFIG_H_NOTOK, CONFIG_H_UNCERTAIN) -from distutils2._backport import sysconfig +from distutils2.tests import unittest, support -from distutils2.tests import run_unittest -from distutils2.tests import captured_stdout - -from distutils2.compiler import cygwinccompiler -from distutils2.compiler.cygwinccompiler import ( - CygwinCCompiler, check_config_h, get_msvcr, - CONFIG_H_OK, CONFIG_H_NOTOK, CONFIG_H_UNCERTAIN) -from distutils2.util import get_compiler_versions -from distutils2.tests import unittest, support class CygwinCCompilerTestCase(support.TempdirManager, unittest.TestCase): @@ -33,7 +28,6 @@ return self.python_h def test_check_config_h(self): - # check_config_h looks for "GCC" in sys.version first # returns CONFIG_H_OK if found sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) \n[GCC ' @@ 
-56,10 +50,9 @@ self.assertEqual(check_config_h()[0], CONFIG_H_OK) def test_get_msvcr(self): - # none - sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) ' - '\n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]') + sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) ' + '\n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]') self.assertEqual(get_msvcr(), None) # MSVC 7.0 @@ -92,4 +85,4 @@ return unittest.makeSuite(CygwinCCompilerTestCase) if __name__ == '__main__': - run_unittest(test_suite()) + unittest.main(defaultTest='test_suite') diff --git a/distutils2/tests/test_database.py b/distutils2/tests/test_database.py new file mode 100644 --- /dev/null +++ b/distutils2/tests/test_database.py @@ -0,0 +1,674 @@ +import os +import csv +import sys +import shutil +import tempfile +from os.path import relpath # separate import for backport concerns +from hashlib import md5 +from textwrap import dedent + +from distutils2.tests.test_util import GlobTestCaseBase +from distutils2.tests.support import requires_zlib + +from distutils2.config import get_resources_dests +from distutils2.errors import PackagingError +from distutils2.metadata import Metadata +from distutils2.tests import unittest, support +from distutils2.database import ( + Distribution, EggInfoDistribution, get_distribution, get_distributions, + provides_distribution, obsoletes_distribution, get_file_users, + enable_cache, disable_cache, distinfo_dirname, _yield_distributions, + get_file, get_file_path) + +# TODO Add a test for getting a distribution provided by another distribution +# TODO Add a test for absolute pathed RECORD items (e.g. /etc/myapp/config.ini) +# TODO Add tests from the former pep376 project (zipped site-packages, etc.) 
+ + +def get_hexdigest(filename): + with open(filename, 'rb') as file: + checksum = md5(file.read()) + return checksum.hexdigest() + + +def record_pieces(file): + path = relpath(file, sys.prefix) + digest = get_hexdigest(file) + size = os.path.getsize(file) + return [path, digest, size] + + +class FakeDistsMixin(object): + + def setUp(self): + super(FakeDistsMixin, self).setUp() + self.addCleanup(enable_cache) + disable_cache() + + # make a copy that we can write into for our fake installed + # distributions + tmpdir = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, tmpdir) + self.fake_dists_path = os.path.join(tmpdir, 'fake_dists') + fake_dists_src = os.path.abspath( + os.path.join(os.path.dirname(__file__), 'fake_dists')) + shutil.copytree(fake_dists_src, self.fake_dists_path) + # XXX ugly workaround: revert copystat calls done by shutil behind our + # back (to avoid getting a read-only copy of a read-only file). we + # could pass a custom copy_function to change the mode of files, but + # shutil gives no control over the mode of directories :( + for root, dirs, files in os.walk(self.fake_dists_path): + os.chmod(root, 0o755) + for f in files: + os.chmod(os.path.join(root, f), 0o644) + for d in dirs: + os.chmod(os.path.join(root, d), 0o755) + + +class CommonDistributionTests(FakeDistsMixin): + """Mixin used to test the interface common to both Distribution classes. + + Derived classes define cls, sample_dist, dirs and records. These + attributes are used in test methods. See source code for details. 
+ """ + + def test_instantiation(self): + # check that useful attributes are here + name, version, distdir = self.sample_dist + here = os.path.abspath(os.path.dirname(__file__)) + dist_path = os.path.join(here, 'fake_dists', distdir) + + dist = self.dist = self.cls(dist_path) + self.assertEqual(dist.path, dist_path) + self.assertEqual(dist.name, name) + self.assertEqual(dist.metadata['Name'], name) + self.assertIsInstance(dist.metadata, Metadata) + self.assertEqual(dist.version, version) + self.assertEqual(dist.metadata['Version'], version) + + @requires_zlib + def test_repr(self): + dist = self.cls(self.dirs[0]) + # just check that the class name is in the repr + self.assertIn(self.cls.__name__, repr(dist)) + + @requires_zlib + def test_comparison(self): + # tests for __eq__ and __hash__ + dist = self.cls(self.dirs[0]) + dist2 = self.cls(self.dirs[0]) + dist3 = self.cls(self.dirs[1]) + self.assertIn(dist, {dist: True}) + self.assertEqual(dist, dist) + + self.assertIsNot(dist, dist2) + self.assertEqual(dist, dist2) + self.assertNotEqual(dist, dist3) + self.assertNotEqual(dist, ()) + + def test_list_installed_files(self): + for dir_ in self.dirs: + dist = self.cls(dir_) + for path, md5_, size in dist.list_installed_files(): + record_data = self.records[dist.path] + self.assertIn(path, record_data) + self.assertEqual(md5_, record_data[path][0]) + self.assertEqual(size, record_data[path][1]) + + +class TestDistribution(CommonDistributionTests, unittest.TestCase): + + cls = Distribution + sample_dist = 'choxie', '2.0.0.9', 'choxie-2.0.0.9.dist-info' + + def setUp(self): + super(TestDistribution, self).setUp() + self.dirs = [os.path.join(self.fake_dists_path, f) + for f in os.listdir(self.fake_dists_path) + if f.endswith('.dist-info')] + + self.records = {} + for distinfo_dir in self.dirs: + + record_file = os.path.join(distinfo_dir, 'RECORD') + with open(record_file, 'w') as file: + record_writer = csv.writer( + file, delimiter=',', quoting=csv.QUOTE_NONE, + 
lineterminator='\n') + + dist_location = distinfo_dir.replace('.dist-info', '') + + for path, dirs, files in os.walk(dist_location): + for f in files: + record_writer.writerow(record_pieces( + os.path.join(path, f))) + for file in ('INSTALLER', 'METADATA', 'REQUESTED'): + record_writer.writerow(record_pieces( + os.path.join(distinfo_dir, file))) + record_writer.writerow([relpath(record_file, sys.prefix)]) + + with open(record_file) as file: + record_reader = csv.reader(file, lineterminator='\n') + record_data = {} + for row in record_reader: + if row == []: + continue + path, md5_, size = (row[:] + + [None for i in range(len(row), 3)]) + record_data[path] = md5_, size + self.records[distinfo_dir] = record_data + + def test_instantiation(self): + super(TestDistribution, self).test_instantiation() + self.assertIsInstance(self.dist.requested, bool) + + def test_uses(self): + # Test to determine if a distribution uses a specified file. + # Criteria to test against + distinfo_name = 'grammar-1.0a4' + distinfo_dir = os.path.join(self.fake_dists_path, + distinfo_name + '.dist-info') + true_path = [self.fake_dists_path, distinfo_name, + 'grammar', 'utils.py'] + true_path = relpath(os.path.join(*true_path), sys.prefix) + false_path = [self.fake_dists_path, 'towel_stuff-0.1', 'towel_stuff', + '__init__.py'] + false_path = relpath(os.path.join(*false_path), sys.prefix) + + # Test if the distribution uses the file in question + dist = Distribution(distinfo_dir) + self.assertTrue(dist.uses(true_path)) + self.assertFalse(dist.uses(false_path)) + + def test_get_distinfo_file(self): + # Test the retrieval of dist-info file objects. 
+ distinfo_name = 'choxie-2.0.0.9' + other_distinfo_name = 'grammar-1.0a4' + distinfo_dir = os.path.join(self.fake_dists_path, + distinfo_name + '.dist-info') + dist = Distribution(distinfo_dir) + # Test for known good file matches + distinfo_files = [ + # Relative paths + 'INSTALLER', 'METADATA', + # Absolute paths + os.path.join(distinfo_dir, 'RECORD'), + os.path.join(distinfo_dir, 'REQUESTED'), + ] + + for distfile in distinfo_files: + with dist.get_distinfo_file(distfile) as value: + self.assertIsInstance(value, file) + # Is it the correct file? + self.assertEqual(value.name, + os.path.join(distinfo_dir, distfile)) + + # Test an absolute path that is part of another distributions dist-info + other_distinfo_file = os.path.join( + self.fake_dists_path, other_distinfo_name + '.dist-info', + 'REQUESTED') + self.assertRaises(PackagingError, dist.get_distinfo_file, + other_distinfo_file) + # Test for a file that should not exist + self.assertRaises(PackagingError, dist.get_distinfo_file, + 'MAGICFILE') + + def test_list_distinfo_files(self): + # Test for the iteration of RECORD path entries. 
+ distinfo_name = 'towel_stuff-0.1' + distinfo_dir = os.path.join(self.fake_dists_path, + distinfo_name + '.dist-info') + dist = Distribution(distinfo_dir) + # Test for the iteration of the raw path + distinfo_record_paths = self.records[distinfo_dir].keys() + found = dist.list_distinfo_files() + self.assertEqual(sorted(found), sorted(distinfo_record_paths)) + # Test for the iteration of local absolute paths + distinfo_record_paths = [os.path.join(sys.prefix, path) + for path in self.records[distinfo_dir]] + found = dist.list_distinfo_files(local=True) + self.assertEqual(sorted(found), sorted(distinfo_record_paths)) + + def test_get_resources_path(self): + distinfo_name = 'babar-0.1' + distinfo_dir = os.path.join(self.fake_dists_path, + distinfo_name + '.dist-info') + dist = Distribution(distinfo_dir) + resource_path = dist.get_resource_path('babar.png') + self.assertEqual(resource_path, 'babar.png') + self.assertRaises(KeyError, dist.get_resource_path, 'notexist') + + +class TestEggInfoDistribution(CommonDistributionTests, + support.LoggingCatcher, + unittest.TestCase): + + cls = EggInfoDistribution + sample_dist = 'bacon', '0.1', 'bacon-0.1.egg-info' + + def setUp(self): + super(TestEggInfoDistribution, self).setUp() + + self.dirs = [os.path.join(self.fake_dists_path, f) + for f in os.listdir(self.fake_dists_path) + if f.endswith('.egg') or f.endswith('.egg-info')] + + self.records = {} + + @unittest.skip('not implemented yet') + def test_list_installed_files(self): + # EggInfoDistribution defines list_installed_files but there is no + # test for it yet; someone with setuptools expertise needs to add a + # file with the list of installed files for one of the egg fake dists + # and write the support code to populate self.records (and then delete + # this method) + pass + + +class TestDatabase(support.LoggingCatcher, + FakeDistsMixin, + unittest.TestCase): + + def setUp(self): + super(TestDatabase, self).setUp() + sys.path.insert(0, self.fake_dists_path) + 
self.addCleanup(sys.path.remove, self.fake_dists_path) + + def test_distinfo_dirname(self): + # Given a name and a version, we expect the distinfo_dirname function + # to return a standard distribution information directory name. + + items = [ + # (name, version, standard_dirname) + # Test for a very simple single word name and decimal version + # number + ('docutils', '0.5', 'docutils-0.5.dist-info'), + # Test for another except this time with a '-' in the name, which + # needs to be transformed during the name lookup + ('python-ldap', '2.5', 'python_ldap-2.5.dist-info'), + # Test for both '-' in the name and a funky version number + ('python-ldap', '2.5 a---5', 'python_ldap-2.5 a---5.dist-info'), + ] + + # Loop through the items to validate the results + for name, version, standard_dirname in items: + dirname = distinfo_dirname(name, version) + self.assertEqual(dirname, standard_dirname) + + @requires_zlib + def test_get_distributions(self): + # Lookup all distributions found in the ``sys.path``. + # This test could potentially pick up other installed distributions + fake_dists = [('grammar', '1.0a4'), ('choxie', '2.0.0.9'), + ('towel-stuff', '0.1'), ('babar', '0.1')] + found_dists = [] + + # Verify the fake dists have been found. 
+ dists = [dist for dist in get_distributions()] + for dist in dists: + self.assertIsInstance(dist, Distribution) + if (dist.name in dict(fake_dists) and + dist.path.startswith(self.fake_dists_path)): + found_dists.append((dist.name, dist.version)) + else: + # check that it doesn't find anything more than this + self.assertFalse(dist.path.startswith(self.fake_dists_path)) + # otherwise we don't care what other distributions are found + + # Finally, test that we found all that we were looking for + self.assertEqual(sorted(found_dists), sorted(fake_dists)) + + # Now, test if the egg-info distributions are found correctly as well + fake_dists += [('bacon', '0.1'), ('cheese', '2.0.2'), + ('coconuts-aster', '10.3'), + ('banana', '0.4'), ('strawberry', '0.6'), + ('truffles', '5.0'), ('nut', 'funkyversion')] + found_dists = [] + + dists = [dist for dist in get_distributions(use_egg_info=True)] + for dist in dists: + self.assertIsInstance(dist, (Distribution, EggInfoDistribution)) + if (dist.name in dict(fake_dists) and + dist.path.startswith(self.fake_dists_path)): + found_dists.append((dist.name, dist.version)) + else: + self.assertFalse(dist.path.startswith(self.fake_dists_path)) + + self.assertEqual(sorted(fake_dists), sorted(found_dists)) + + @requires_zlib + def test_get_distribution(self): + # Test for looking up a distribution by name. 
+ # Test the lookup of the towel-stuff distribution + name = 'towel-stuff' # Note: This is different from the directory name + + # Lookup the distribution + dist = get_distribution(name) + self.assertIsInstance(dist, Distribution) + self.assertEqual(dist.name, name) + + # Verify that an unknown distribution returns None + self.assertIsNone(get_distribution('bogus')) + + # Verify partial name matching doesn't work + self.assertIsNone(get_distribution('towel')) + + # Verify that it does not find egg-info distributions, when not + # instructed to + self.assertIsNone(get_distribution('bacon')) + self.assertIsNone(get_distribution('cheese')) + self.assertIsNone(get_distribution('strawberry')) + self.assertIsNone(get_distribution('banana')) + + # Now check that it works well in both situations, when egg-info + # is a file and directory respectively. + dist = get_distribution('cheese', use_egg_info=True) + self.assertIsInstance(dist, EggInfoDistribution) + self.assertEqual(dist.name, 'cheese') + + dist = get_distribution('bacon', use_egg_info=True) + self.assertIsInstance(dist, EggInfoDistribution) + self.assertEqual(dist.name, 'bacon') + + dist = get_distribution('banana', use_egg_info=True) + self.assertIsInstance(dist, EggInfoDistribution) + self.assertEqual(dist.name, 'banana') + + dist = get_distribution('strawberry', use_egg_info=True) + self.assertIsInstance(dist, EggInfoDistribution) + self.assertEqual(dist.name, 'strawberry') + + def test_get_file_users(self): + # Test the iteration of distributions that use a file. 
+ name = 'towel_stuff-0.1' + path = os.path.join(self.fake_dists_path, name, + 'towel_stuff', '__init__.py') + for dist in get_file_users(path): + self.assertIsInstance(dist, Distribution) + self.assertEqual(dist.name, name) + + @requires_zlib + def test_provides(self): + # Test for looking up distributions by what they provide + checkLists = lambda x, y: self.assertEqual(sorted(x), sorted(y)) + + l = [dist.name for dist in provides_distribution('truffles')] + checkLists(l, ['choxie', 'towel-stuff']) + + l = [dist.name for dist in provides_distribution('truffles', '1.0')] + checkLists(l, ['choxie']) + + l = [dist.name for dist in provides_distribution('truffles', '1.0', + use_egg_info=True)] + checkLists(l, ['choxie', 'cheese']) + + l = [dist.name for dist in provides_distribution('truffles', '1.1.2')] + checkLists(l, ['towel-stuff']) + + l = [dist.name for dist in provides_distribution('truffles', '1.1')] + checkLists(l, ['towel-stuff']) + + l = [dist.name for dist in provides_distribution('truffles', + '!=1.1,<=2.0')] + checkLists(l, ['choxie']) + + l = [dist.name for dist in provides_distribution('truffles', + '!=1.1,<=2.0', + use_egg_info=True)] + checkLists(l, ['choxie', 'bacon', 'cheese']) + + l = [dist.name for dist in provides_distribution('truffles', '>1.0')] + checkLists(l, ['towel-stuff']) + + l = [dist.name for dist in provides_distribution('truffles', '>1.5')] + checkLists(l, []) + + l = [dist.name for dist in provides_distribution('truffles', '>1.5', + use_egg_info=True)] + checkLists(l, ['bacon']) + + l = [dist.name for dist in provides_distribution('truffles', '>=1.0')] + checkLists(l, ['choxie', 'towel-stuff']) + + l = [dist.name for dist in provides_distribution('strawberry', '0.6', + use_egg_info=True)] + checkLists(l, ['coconuts-aster']) + + l = [dist.name for dist in provides_distribution('strawberry', '>=0.5', + use_egg_info=True)] + checkLists(l, ['coconuts-aster']) + + l = [dist.name for dist in provides_distribution('strawberry', '>0.6', + 
use_egg_info=True)] + checkLists(l, []) + + l = [dist.name for dist in provides_distribution('banana', '0.4', + use_egg_info=True)] + checkLists(l, ['coconuts-aster']) + + l = [dist.name for dist in provides_distribution('banana', '>=0.3', + use_egg_info=True)] + checkLists(l, ['coconuts-aster']) + + l = [dist.name for dist in provides_distribution('banana', '!=0.4', + use_egg_info=True)] + checkLists(l, []) + + @requires_zlib + def test_obsoletes(self): + # Test looking for distributions based on what they obsolete + checkLists = lambda x, y: self.assertEqual(sorted(x), sorted(y)) + + l = [dist.name for dist in obsoletes_distribution('truffles', '1.0')] + checkLists(l, []) + + l = [dist.name for dist in obsoletes_distribution('truffles', '1.0', + use_egg_info=True)] + checkLists(l, ['cheese', 'bacon']) + + l = [dist.name for dist in obsoletes_distribution('truffles', '0.8')] + checkLists(l, ['choxie']) + + l = [dist.name for dist in obsoletes_distribution('truffles', '0.8', + use_egg_info=True)] + checkLists(l, ['choxie', 'cheese']) + + l = [dist.name for dist in obsoletes_distribution('truffles', '0.9.6')] + checkLists(l, ['choxie', 'towel-stuff']) + + l = [dist.name for dist in obsoletes_distribution('truffles', + '0.5.2.3')] + checkLists(l, ['choxie', 'towel-stuff']) + + l = [dist.name for dist in obsoletes_distribution('truffles', '0.2')] + checkLists(l, ['towel-stuff']) + + @requires_zlib + def test_yield_distribution(self): + # tests the internal function _yield_distributions + checkLists = lambda x, y: self.assertEqual(sorted(x), sorted(y)) + + eggs = [('bacon', '0.1'), ('banana', '0.4'), ('strawberry', '0.6'), + ('truffles', '5.0'), ('cheese', '2.0.2'), + ('coconuts-aster', '10.3'), ('nut', 'funkyversion')] + dists = [('choxie', '2.0.0.9'), ('grammar', '1.0a4'), + ('towel-stuff', '0.1'), ('babar', '0.1')] + + checkLists([], _yield_distributions(False, False, sys.path)) + + found = [(dist.name, dist.version) + for dist in _yield_distributions(False, True, 
sys.path) + if dist.path.startswith(self.fake_dists_path)] + checkLists(eggs, found) + + found = [(dist.name, dist.version) + for dist in _yield_distributions(True, False, sys.path) + if dist.path.startswith(self.fake_dists_path)] + checkLists(dists, found) + + found = [(dist.name, dist.version) + for dist in _yield_distributions(True, True, sys.path) + if dist.path.startswith(self.fake_dists_path)] + checkLists(dists + eggs, found) + + +class DataFilesTestCase(GlobTestCaseBase): + + def assertRulesMatch(self, rules, spec): + tempdir = self.build_files_tree(spec) + expected = self.clean_tree(spec) + result = get_resources_dests(tempdir, rules) + self.assertEqual(expected, result) + + def clean_tree(self, spec): + files = {} + for path, value in spec.items(): + if value is not None: + files[path] = value + return files + + def test_simple_glob(self): + rules = [('', '*.tpl', '{data}')] + spec = {'coucou.tpl': '{data}/coucou.tpl', + 'Donotwant': None} + self.assertRulesMatch(rules, spec) + + def test_multiple_match(self): + rules = [('scripts', '*.bin', '{appdata}'), + ('scripts', '*', '{appscript}')] + spec = {'scripts/script.bin': '{appscript}/script.bin', + 'Babarlikestrawberry': None} + self.assertRulesMatch(rules, spec) + + def test_set_match(self): + rules = [('scripts', '*.{bin,sh}', '{appscript}')] + spec = {'scripts/script.bin': '{appscript}/script.bin', + 'scripts/babar.sh': '{appscript}/babar.sh', + 'Babarlikestrawberry': None} + self.assertRulesMatch(rules, spec) + + def test_set_match_multiple(self): + rules = [('scripts', 'script{s,}.{bin,sh}', '{appscript}')] + spec = {'scripts/scripts.bin': '{appscript}/scripts.bin', + 'scripts/script.sh': '{appscript}/script.sh', + 'Babarlikestrawberry': None} + self.assertRulesMatch(rules, spec) + + def test_set_match_exclude(self): + rules = [('scripts', '*', '{appscript}'), + ('', os.path.join('**', '*.sh'), None)] + spec = {'scripts/scripts.bin': '{appscript}/scripts.bin', + 'scripts/script.sh': None, + 
'Babarlikestrawberry': None} + self.assertRulesMatch(rules, spec) + + def test_glob_in_base(self): + rules = [('scrip*', '*.bin', '{appscript}')] + spec = {'scripts/scripts.bin': '{appscript}/scripts.bin', + 'scripouille/babar.bin': '{appscript}/babar.bin', + 'scriptortu/lotus.bin': '{appscript}/lotus.bin', + 'Babarlikestrawberry': None} + self.assertRulesMatch(rules, spec) + + def test_recursive_glob(self): + rules = [('', os.path.join('**', '*.bin'), '{binary}')] + spec = {'binary0.bin': '{binary}/binary0.bin', + 'scripts/binary1.bin': '{binary}/scripts/binary1.bin', + 'scripts/bin/binary2.bin': '{binary}/scripts/bin/binary2.bin', + 'you/kill/pandabear.guy': None} + self.assertRulesMatch(rules, spec) + + def test_final_exemple_glob(self): + rules = [ + ('mailman/database/schemas/', '*', '{appdata}/schemas'), + ('', os.path.join('**', '*.tpl'), '{appdata}/templates'), + ('', os.path.join('developer-docs', '**', '*.txt'), '{doc}'), + ('', 'README', '{doc}'), + ('mailman/etc/', '*', '{config}'), + ('mailman/foo/', os.path.join('**', 'bar', '*.cfg'), + '{config}/baz'), + ('mailman/foo/', os.path.join('**', '*.cfg'), '{config}/hmm'), + ('', 'some-new-semantic.sns', '{funky-crazy-category}'), + ] + spec = { + 'README': '{doc}/README', + 'some.tpl': '{appdata}/templates/some.tpl', + 'some-new-semantic.sns': + '{funky-crazy-category}/some-new-semantic.sns', + 'mailman/database/mailman.db': None, + 'mailman/database/schemas/blah.schema': + '{appdata}/schemas/blah.schema', + 'mailman/etc/my.cnf': '{config}/my.cnf', + 'mailman/foo/some/path/bar/my.cfg': + '{config}/hmm/some/path/bar/my.cfg', + 'mailman/foo/some/path/other.cfg': + '{config}/hmm/some/path/other.cfg', + 'developer-docs/index.txt': '{doc}/developer-docs/index.txt', + 'developer-docs/api/toc.txt': '{doc}/developer-docs/api/toc.txt', + } + self.maxDiff = None + self.assertRulesMatch(rules, spec) + + def test_get_file(self): + # Create a fake dist + temp_site_packages = tempfile.mkdtemp() + 
self.addCleanup(shutil.rmtree, temp_site_packages) + + dist_name = 'test' + dist_info = os.path.join(temp_site_packages, 'test-0.1.dist-info') + os.mkdir(dist_info) + + metadata_path = os.path.join(dist_info, 'METADATA') + resources_path = os.path.join(dist_info, 'RESOURCES') + + with open(metadata_path, 'w') as fp: + fp.write(dedent("""\ + Metadata-Version: 1.2 + Name: test + Version: 0.1 + Summary: test + Author: me + """)) + + test_path = 'test.cfg' + + fd, test_resource_path = tempfile.mkstemp() + os.close(fd) + self.addCleanup(os.remove, test_resource_path) + + with open(test_resource_path, 'w') as fp: + fp.write('Config') + + with open(resources_path, 'w') as fp: + fp.write('%s,%s' % (test_path, test_resource_path)) + + # Add fake site-packages to sys.path to retrieve fake dist + self.addCleanup(sys.path.remove, temp_site_packages) + sys.path.insert(0, temp_site_packages) + + # Force distutils2.database to rescan the sys.path + self.addCleanup(enable_cache) + disable_cache() + + # Try to retrieve resources paths and files + self.assertEqual(get_file_path(dist_name, test_path), + test_resource_path) + self.assertRaises(KeyError, get_file_path, dist_name, 'i-dont-exist') + + with get_file(dist_name, test_path) as fp: + self.assertEqual(fp.read(), 'Config') + self.assertRaises(KeyError, get_file, dist_name, 'i-dont-exist') + + +def test_suite(): + suite = unittest.TestSuite() + load = unittest.defaultTestLoader.loadTestsFromTestCase + suite.addTest(load(TestDistribution)) + suite.addTest(load(TestEggInfoDistribution)) + suite.addTest(load(TestDatabase)) + suite.addTest(load(DataFilesTestCase)) + return suite + + +if __name__ == "__main__": + unittest.main(defaultTest='test_suite') diff --git a/distutils2/tests/test_depgraph.py b/distutils2/tests/test_depgraph.py --- a/distutils2/tests/test_depgraph.py +++ b/distutils2/tests/test_depgraph.py @@ -1,33 +1,25 @@ -"""Tests for distutils.depgraph """ +"""Tests for distutils2.depgraph """ +import os +import re +import 
sys +from StringIO import StringIO + +import distutils2.database +from distutils2 import depgraph from distutils2.tests import unittest, support -from distutils2 import depgraph -from distutils2._backport import pkgutil +from distutils2.tests.support import requires_zlib -import os -import sys -import re -try: - import cStringIO as StringIO -except ImportError: - import StringIO class DepGraphTestCase(support.LoggingCatcher, - support.WarningsCatcher, unittest.TestCase): DISTROS_DIST = ('choxie', 'grammar', 'towel-stuff') - DISTROS_EGG = ('bacon', 'banana', 'strawberry', 'cheese') + DISTROS_EGG = ('bacon', 'banana', 'strawberry', 'cheese') BAD_EGGS = ('nut',) EDGE = re.compile( - r'"(?P.*)" -> "(?P.*)" \[label="(?P