[Python-checkins] r88872 - in tracker/roundup-src: BUILD.txt CHANGES.txt demo.py doc/FAQ.txt doc/Makefile doc/acknowledgements.txt doc/admin_guide.txt doc/announcement.txt doc/customizing.txt doc/debugging.txt doc/design.txt doc/developers.txt doc/index.txt doc/installation.txt doc/mysql.txt doc/postgresql.txt doc/upgrading.txt doc/user_guide.txt doc/xmlrpc.txt frontends/roundup.cgi locale/de.po locale/it.po roundup/__init__.py roundup/actions.py roundup/admin.py roundup/anypy/cookie_.py roundup/anypy/dbm_.py roundup/anypy/email_.py roundup/anypy/hashlib_.py roundup/anypy/http_.py roundup/anypy/io_.py roundup/anypy/sets_.py roundup/anypy/urllib_.py roundup/backends/__init__.py roundup/backends/back_anydbm.py roundup/backends/back_mysql.py roundup/backends/back_postgresql.py roundup/backends/back_sqlite.py roundup/backends/blobfiles.py roundup/backends/indexer_common.py roundup/backends/indexer_dbm.py roundup/backends/indexer_rdbms.py roundup/backends/indexer_xapian.py roundup/backends/locking.py roundup/backends/portalocker.py roundup/backends/rdbms_common.py roundup/backends/sessions_dbm.py roundup/backends/sessions_rdbms.py roundup/cgi/KeywordsExpr.py roundup/cgi/accept_language.py roundup/cgi/actions.py roundup/cgi/cgitb.py roundup/cgi/client.py roundup/cgi/form_parser.py roundup/cgi/templating.py roundup/cgi/wsgi_handler.py roundup/configuration.py roundup/date.py roundup/dist/command/build.py roundup/dist/command/install_lib.py roundup/hyperdb.py roundup/init.py roundup/instance.py roundup/mailer.py roundup/mailgw.py roundup/password.py roundup/roundupdb.py roundup/scripts/roundup_mailgw.py roundup/scripts/roundup_server.py roundup/security.py roundup/xmlrpc.py scripts/imapServer.py setup.py share/roundup/templates/classic/html/_generic.index.html share/roundup/templates/classic/html/_generic.keywords_expr.html share/roundup/templates/classic/html/issue.search.html share/roundup/templates/classic/html/page.html share/roundup/templates/classic/html/style.css 
share/roundup/templates/classic/schema.py share/roundup/templates/minimal/html/_generic.index.html share/roundup/templates/minimal/html/page.html share/roundup/templates/minimal/html/style.css share/roundup/templates/minimal/schema.py test/db_test_base.py test/memorydb.py test/session_common.py test/test_actions.py test/test_cgi.py test/test_dates.py test/test_indexer.py test/test_mailer.py test/test_mailgw.py test/test_memorydb.py test/test_multipart.py test/test_mysql.py test/test_postgresql.py test/test_security.py test/test_sqlite.py test/test_templating.py test/test_xmlrpc.py

ezio.melotti python-checkins at python.org
Thu Aug 4 15:46:53 CEST 2011


Author: ezio.melotti
Date: Thu Aug  4 15:46:52 2011
New Revision: 88872

Log:
#411: Upgrade to 1.4.19.

Added:
   tracker/roundup-src/roundup/anypy/cookie_.py
   tracker/roundup-src/roundup/anypy/dbm_.py
   tracker/roundup-src/roundup/anypy/email_.py
   tracker/roundup-src/roundup/anypy/http_.py
   tracker/roundup-src/roundup/anypy/io_.py
   tracker/roundup-src/roundup/anypy/urllib_.py
   tracker/roundup-src/roundup/cgi/KeywordsExpr.py
   tracker/roundup-src/roundup/dist/command/install_lib.py
   tracker/roundup-src/share/roundup/templates/classic/html/_generic.keywords_expr.html
   tracker/roundup-src/test/memorydb.py
   tracker/roundup-src/test/test_mailer.py
   tracker/roundup-src/test/test_memorydb.py
Modified:
   tracker/roundup-src/BUILD.txt
   tracker/roundup-src/CHANGES.txt
   tracker/roundup-src/demo.py
   tracker/roundup-src/doc/FAQ.txt
   tracker/roundup-src/doc/Makefile
   tracker/roundup-src/doc/acknowledgements.txt
   tracker/roundup-src/doc/admin_guide.txt
   tracker/roundup-src/doc/announcement.txt
   tracker/roundup-src/doc/customizing.txt
   tracker/roundup-src/doc/debugging.txt
   tracker/roundup-src/doc/design.txt
   tracker/roundup-src/doc/developers.txt
   tracker/roundup-src/doc/index.txt
   tracker/roundup-src/doc/installation.txt
   tracker/roundup-src/doc/mysql.txt
   tracker/roundup-src/doc/postgresql.txt
   tracker/roundup-src/doc/upgrading.txt
   tracker/roundup-src/doc/user_guide.txt
   tracker/roundup-src/doc/xmlrpc.txt
   tracker/roundup-src/frontends/roundup.cgi
   tracker/roundup-src/locale/de.po
   tracker/roundup-src/locale/it.po
   tracker/roundup-src/roundup/__init__.py
   tracker/roundup-src/roundup/actions.py
   tracker/roundup-src/roundup/admin.py
   tracker/roundup-src/roundup/anypy/hashlib_.py
   tracker/roundup-src/roundup/anypy/sets_.py
   tracker/roundup-src/roundup/backends/__init__.py
   tracker/roundup-src/roundup/backends/back_anydbm.py
   tracker/roundup-src/roundup/backends/back_mysql.py
   tracker/roundup-src/roundup/backends/back_postgresql.py
   tracker/roundup-src/roundup/backends/back_sqlite.py
   tracker/roundup-src/roundup/backends/blobfiles.py
   tracker/roundup-src/roundup/backends/indexer_common.py
   tracker/roundup-src/roundup/backends/indexer_dbm.py
   tracker/roundup-src/roundup/backends/indexer_rdbms.py
   tracker/roundup-src/roundup/backends/indexer_xapian.py
   tracker/roundup-src/roundup/backends/locking.py
   tracker/roundup-src/roundup/backends/portalocker.py
   tracker/roundup-src/roundup/backends/rdbms_common.py
   tracker/roundup-src/roundup/backends/sessions_dbm.py
   tracker/roundup-src/roundup/backends/sessions_rdbms.py
   tracker/roundup-src/roundup/cgi/accept_language.py
   tracker/roundup-src/roundup/cgi/actions.py
   tracker/roundup-src/roundup/cgi/cgitb.py
   tracker/roundup-src/roundup/cgi/client.py
   tracker/roundup-src/roundup/cgi/form_parser.py
   tracker/roundup-src/roundup/cgi/templating.py
   tracker/roundup-src/roundup/cgi/wsgi_handler.py
   tracker/roundup-src/roundup/configuration.py
   tracker/roundup-src/roundup/date.py
   tracker/roundup-src/roundup/dist/command/build.py
   tracker/roundup-src/roundup/hyperdb.py
   tracker/roundup-src/roundup/init.py
   tracker/roundup-src/roundup/instance.py
   tracker/roundup-src/roundup/mailer.py
   tracker/roundup-src/roundup/mailgw.py
   tracker/roundup-src/roundup/password.py
   tracker/roundup-src/roundup/roundupdb.py
   tracker/roundup-src/roundup/scripts/roundup_mailgw.py
   tracker/roundup-src/roundup/scripts/roundup_server.py
   tracker/roundup-src/roundup/security.py
   tracker/roundup-src/roundup/xmlrpc.py
   tracker/roundup-src/scripts/imapServer.py
   tracker/roundup-src/setup.py
   tracker/roundup-src/share/roundup/templates/classic/html/_generic.index.html
   tracker/roundup-src/share/roundup/templates/classic/html/issue.search.html
   tracker/roundup-src/share/roundup/templates/classic/html/page.html
   tracker/roundup-src/share/roundup/templates/classic/html/style.css
   tracker/roundup-src/share/roundup/templates/classic/schema.py
   tracker/roundup-src/share/roundup/templates/minimal/html/_generic.index.html
   tracker/roundup-src/share/roundup/templates/minimal/html/page.html
   tracker/roundup-src/share/roundup/templates/minimal/html/style.css
   tracker/roundup-src/share/roundup/templates/minimal/schema.py
   tracker/roundup-src/test/db_test_base.py
   tracker/roundup-src/test/session_common.py
   tracker/roundup-src/test/test_actions.py
   tracker/roundup-src/test/test_cgi.py
   tracker/roundup-src/test/test_dates.py
   tracker/roundup-src/test/test_indexer.py
   tracker/roundup-src/test/test_mailgw.py
   tracker/roundup-src/test/test_multipart.py
   tracker/roundup-src/test/test_mysql.py
   tracker/roundup-src/test/test_postgresql.py
   tracker/roundup-src/test/test_security.py
   tracker/roundup-src/test/test_sqlite.py
   tracker/roundup-src/test/test_templating.py
   tracker/roundup-src/test/test_xmlrpc.py

Modified: tracker/roundup-src/BUILD.txt
==============================================================================
--- tracker/roundup-src/BUILD.txt	(original)
+++ tracker/roundup-src/BUILD.txt	Thu Aug  4 15:46:52 2011
@@ -9,31 +9,33 @@
 Building and distributing a release of Roundup is done by running:
 
 1.  Make sure the unit tests run! "./run_tests.py"
-2.  XXX "tag" SVN??
-3.  Edit roundup/__init__.py and doc/announcement.txt to reflect the new
-    version and appropriate announcements. Add truncated announcement to
-    setup.py description field.
+2.  Edit roundup/__init__.py and doc/announcement.txt to reflect the new
+    version and appropriate announcements.
+3.  Note the SVN revision in the CHANGES.txt file.
 4.  Clean out all *.orig, *.rej, .#* files from the source.
 5.  python setup.py clean --all
 6.  Edit setup.py to ensure that all information therein (version, contact
     information etc) is correct.
-7.  python setup.py sdist --manifest-only
-8.  Check the MANIFEST to make sure that any new files are included. If
+7.  python setup.py build_doc
+8.  python setup.py sdist --manifest-only
+9.  Check the MANIFEST to make sure that any new files are included. If
     they are not, edit MANIFEST.in to include them. "Documentation" for
     MANIFEST.in may be found in disutils.filelist._parse_template_line.
-9.  python setup.py sdist
+10. python setup.py sdist
     (if you find sdist a little verbose, add "--quiet" to the end of the
      command)
-10. Unpack the new dist file in /tmp then a) run_test.py and b) demo.py
+11. Unpack the new dist file in /tmp then a) run_test.py and b) demo.py
     with all available Python versions.
-11. Generate gpg signature with "gpg -a --detach-sign"
-12. python setup.py bdist_rpm
-13. python setup.py bdist_wininst
-14. Send doc/announcement.txt to python-announce at python.org
-15. Notify any other news services as appropriate...
+12. Assuming all is well tag the release in SVN::
 
-      http://freshmeat.net/projects/roundup/
+      svn cp https://svn.roundup-tracker.org/svnroot/roundup/roundup/trunk \
+      https://svn.roundup-tracker.org/svnroot/roundup/roundup/tags/release-1-4-19
 
+13. python setup.py bdist_rpm
+14. python setup.py bdist_wininst
+15. Send doc/announcement.txt to python-announce at python.org and
+    roundup-users at lists.sourceforge.net and
+    roundup-devel at lists.sourceforge.net
 
 So, those commands in a nice, cut'n'pasteable form::
 
@@ -41,11 +43,17 @@
  find . -name '*.rej' -exec rm {} \;
  find . -name '.#*' -exec rm {} \;
  python setup.py clean --all
+ python setup.py build_doc
  python setup.py sdist --manifest-only
  python setup.py sdist --quiet
  python setup.py bdist_rpm
  python setup.py bdist_wininst
  python setup.py register
- python2.5 setup.py sdist upload --sign
+ python setup.py sdist upload --sign
+ python2.5 setup.py bdist_wininst upload --sign
 
+(if the last two fail make sure you're using python2.5+)
+Note that python2.6 won't correctly create a bdist_wininst install on
+Linux (it will produce a .exe with "linux" in the name). 2.7 still has
+this bug (Ralf)
 

Modified: tracker/roundup-src/CHANGES.txt
==============================================================================
--- tracker/roundup-src/CHANGES.txt	(original)
+++ tracker/roundup-src/CHANGES.txt	Thu Aug  4 15:46:52 2011
@@ -1,12 +1,365 @@
-This file contains the changes to the Roundup system over time. The entries
-are given with the most recent entry first.
+This file contains the changes to the Roundup system over time. 
+The entries are given with the most recent entry first. 
+Each entry has the developer who committed the change in brackets.
+Entries without name were done by Richard Jones.
+
+2011-XX-XX 1.4.20 (r4XXX)
+
+Features:
+Fixed:
+
+issue2550695: 'No sort or group' settings not retained when editing queries.
+  Reported and fixed by John Kristensen. Tested by Satchidanand Haridas. 
+  (Bernhard)
+
+2011-07-15 1.4.19 (r4638)
+
+Features:
+
+- Xapian indexing improved: Slightly faster and slightly smaller database. 
+  Closes issue2550687. Thanks to Olly Betts for the patch. (Bernhard Reiter)
+- PostgreSQL backend minor improvement: database creation less likely to fail
+  for PostgreSQL versions >= 8.1 as the table "postgres" is used by default.
+  Closes issue2550543. Thanks to Kai Storbeck for the patch. (Bernhard Reiter)
+- Allow HTMLRequest.batch to filter on other permissions than "View"
+  (e.g. on the new "Search" permission") by adding a "permission"
+  parameter. Thanks to Eli Collins for the patch. Closes issue2550699. (Ralf)
+
+Fixed:
+
+- Installation: Fixed an issue that prevented to use EasyInstall 
+  and a Python egg. Thanks to Satchidanand Haridas for the patch and
+  John Kristensen for testing it. (Bernhard Reiter)
+- The PostgreSQL backend quotes database names now for CREATE and DROP, 
+  enabling more exotic tracker names. Closes issue2550497. 
+  Thanks to Sebastian Harl for providing the patch. (Bernhard Reiter)
+- Updated the url to point to www.roundup-tracker.org in two places in the
+  docs. (Bernhard Reiter)
+- Do not depend on a CPython implementation detail anymore to make Roundup 
+  more compatible with other Python implementations like PyPy.
+  Closes issue2550707. Thanks to Christof Meerwald. (Bernhard Reiter, Richard)
+- Yet another fix to the mail gateway, messages got *all* files of
+  an issue, not just the new ones. Thanks to Rafal Bisingier for
+  reporting and proposing a fix. The regression test was updated.
+  (Ralf)
+- Fix version numbers in upgrade documentation, the file-unlink defect
+  was in 1.4.17 not 1.4.16. Thanks to Rafal Bisingier. (Ralf)
+- Fix encoded email header parsing if multiple encoded and non-encoded
+  parts are present. RFC2047 specifies that spacing is removed only
+  between encoded parts, we always removed the space. Note that this bug
+  was present before mail gateway refactoring :-) Thanks for thorough
+  testing of mail gateway code by Rafal Bisingier. (Ralf)
+- The "Retire" permission was not being registered. (Richard)
+- Fix StringIO issue2550713: io.StringIO in newer versions of python
+  returns unicode strings and expects a unicode string in the
+  constructor. Unfortunately csv  doesn't handle unicode (yet). So we
+  need to use a BytesIO which gets the utf-8 string from the
+  web-interface. Compatibility for old versions by using
+  StringIO.StringIO for emulating a io.BytesIO also works.
+  Thanks to Cédric Krier for reporting. Closes issue2550713.
+  Added a regression test for EditCSVAction (Ralf)
+- Fix issue2550691 where a Unix From-Header was sometimes inserted in
+  outgoing emails, thanks to Joseph Myers for the patch. (Ralf)
+
+
+2011-05-29 1.4.18 (r4610)
+
+Features:
+
+- Norwegian Bokmal translation by Christian Aastorp (Ralf)
+- Allow to specify additional cc and bcc emails (not roundup users) for
+  nosymessage used by the nosyreaction reactor. (Ralf)
+
+Fixed:
+
+- File-unlink defect in mailgw fixed! If an email was received
+  that contained no attachments, all previous files of the issue were unlinked. 
+  This defect was introduced with the 1.4.17 release as an unwanted result 
+  of the mail gate code refactoring. Thanks to Rafal Bisingier for reporting 
+  and proposing a fix. There is now a regression test in place. (Ralf)
+
+2011-05-13 1.4.17 (r4605)
+
+Features:
+
+- Allow declaration of default_values for properties in schema.
+- Add explicit "Search" permissions, see Security Fix below.
+- Add "lookup" method to xmlrpc interface (Ralf Schlatterbeck)
+- Multilinks can be filtered by combining elements with AND, OR and NOT
+  operators now. A javascript gui was added for "keywords", see issue2550648.
+  Developed by Sascha Teichmann; funded by Intevation. (Bernhard Reiter)
+- Factor MailGW message parsing into a separate class, thanks to John
+  Kristensen who did the major work in issue2550576 -- I wouldn't
+  have attempted it without this. Fixes issue2550576. (Ralf)
+- Now if the -C option to roundup-mailgw specifies "issue" this refers
+  to an issue-like class. The real class is determined from the
+  configured default class, or the -c option to the mailgw, or the class
+  resulting from mail subject parsing. We also accept multiple -S
+  options for the same class now. (Ralf)
+- Optimisation: Late evaluation of Multilinks (only in rdbms backends):
+  previously we materialized each multilink in a Node -- this creates an
+  SQL query for each multilink (e.g. 'files' and 'messages' for each
+  line in the issue index display) -- even if the multilinks aren't
+  displayed. Now we compute multilinks only if they're accessed (and
+  keep them cached).
+- Add a filter_iter similar to the existing filter call. This feature is
+  considered experimental. This is currently not used in the
+  web-interface but passes all tests for the filter call except sorting
+  by Multilinks (which isn't supported by SQL and isn't a sane concept
+  anyway). When using filter_iter instead of filter this saves a *lot*
+  of SQL queries: Filter returns only the IDs of Nodes in the database,
+  the additional content of a Node has to be fetched in a separate SQL
+  call. The new filter_iter also returns the IDs of Nodes (one by one,
+  it's an iterator) but pre-seeds the cache with the content of the
+  Node. The information needed for seeding the cache is retrieved in the
+  same SQL query as the ids.
+
+Fixed:
+
+- Security Fix: Add a check for search-permissions: now we allow
+  searching for properties only if the property is readable without a
+  check method or if an explicit search permission (see above under
+  "Features") is given for the property. This fixes cases where a user
+  doesn't have access to a property but can deduce the content by
+  crafting a clever search, group or sort query.
+  see doc/upgrading.txt for how to fix your trackers! (Ralf Schlatterbeck).
+- Range support in roundup-server so large files can be served, 
+  e.g. media files on iOS/iPads; issue2550694. (Bernhard Reiter; 
+  Thanks to Jon C. Thomason for the patch.)
+- Fix search for xapian 1.2 issue2550676 
+  (Bernhard Reiter; Thanks to Olly Betts for providing the patch.)
+- Some minor typos fixed in doc/customizing.txt (Thanks Ralf Hemmecke).
+- XML-RPC documentation now linked from the docs/index (Bernhard Reiter).
+- Fix setting of sys.path when importing schema.py, fixes issue2550675,
+  thanks to Bryce L Nordgren for reporting. (Ralf Schlatterbeck)
+- clear the cache on commit for rdbms backends: Don't carry over cached
+  values from one transaction to the next (there may be other changes
+  from other transactions) see new ConcurrentDBTest for a
+  read-modify-update cycle that fails with the old caching behavior.
+  (Ralf Schlatterbeck)
+- Fix incorrect setting of template in customizing.txt example action,
+  patch via issue2550682 (thanks John Kristensen)
+- Configuration issue: On some postgresql 8.4 installations (notably on
+  debian squeeze) the default template database used for database
+  creation doesn't match the needed character encoding UTF8 -- a new
+  config option 'template' in the rdbms section now allows specification
+  of the template. You know you need this option if you get the error
+  message:
+  psycopg2.DataError: new encoding (UTF8) is incompatible with the
+  encoding of the template database (SQL_ASCII)
+  HINT:  Use the same encoding as in the template database, or use
+  template0 as template.
+  (Ralf Schlatterbeck)
+- Fixed bug in mailgw refactoring, patch issue2550697 (thanks Hubert
+  Touvet)
+- Fix Password handling security issue2550688 (thanks Joseph Myers for
+  reporting and Eli Collins for fixing) -- this fixes all observations
+  by Joseph Myers except for auto-migration of existing passwords.
+- Add new config-option 'migrate_passwords' in section 'web' to
+  auto-migrate passwords at web-login time. Default for the new option
+  is "yes" so if you don't want that passwords are auto-migrated to a
+  more secure password scheme on user login, set this to "no" before
+  running your tracker(s) after the upgrade.
+- Add new config-option 'password_pbkdf2_default_rounds' in 'main'
+  section to configure the default parameter for new password
+  generation. Set this to a higher value on faster systems which want
+  more security. Thanks to Eli Collins for implementing this (see
+  issue2550688).
+- Fix documentation for roundup-server about the 'host' parameter as
+  suggested in issue2550693, fixes the first part of this issue. Make
+  'localhost' the new default for this parameter, note the upgrading
+  documentation of changed behaviour.  We also deprecate the empty host
+  parameter for binding to all interfaces now (still left in for
+  compatibility). Thanks to Toni Mueller for providing the first version
+  of this patch and discussing implementations.
+- Fixed bug in filter_iter refactoring (lazy multilinks), in rare cases
+  this would result in duplicate multilinks to the same node. We're now
+  going the safe route and doing lazy evaluation only for read-only
+  access, whenever updates are done we fetch everything.
+
+2010-10-08 1.4.16 (r4541)
+
+Features:
+
+- allow trackers to override the classes used to render properties in
+  templating per issue2550659 (thanks Ezio Melotti)
+- new mailgw configuration item "subject_updates_title": If set to "no"
+  a changed subject in a reply to an issue will not update the issue
+  title with the changed subject. Thanks to Arkadiusz Kita and Peter
+  Funk for requesting the feature and discussing the implementation.
+  http://thread.gmane.org/gmane.comp.bug-tracking.roundup.user/10169
+- new rdbms config item sqlite_timeout makes the previously hard-coded
+  timeout of 30 seconds configurable. This is the time a client waits
+  for the locked database to become free before giving up. Used only for
+  SQLite backend.
+- new mailgw config item unpack_rfc822 that unpacks message attachments
+  of type message/rfc822 and attaches the individual parts instead of
+  attaching the whole message/rfc822 attachment to the roundup issue.
+
+Fixed:
+
+- fixed reporting of source missing warnings
+- relevant tests made locale independent, issue2550660 (thanks
+  Benni Bärmann for reporting).
+- fix for incorrect except: syntax, issue2550661 (thanks Jakub Wilk)
+- No longer use the root logger, use a logger with prefix "roundup",
+  see http://thread.gmane.org/gmane.comp.bug-tracking.roundup.devel/5356
+- improve handling of '>' when URLs are converted to links, issue2550664
+  (thanks Ezio Melotti)
+- fixed registration, issue2550665 (thanks Timo Paulssen)
+- make sorting of multilinks in the web interface more robust, issue2550663
+- Fix charset of first text-part of outgoing multipart messages, thanks Dirk
+  Geschke for reporting, see
+  http://thread.gmane.org/gmane.comp.bug-tracking.roundup.user/10223
+- Fix handling of incoming message/rfc822 attachments. These resulted in
+  a weird mail usage error because the email module threw a TypeError
+  which roundup interprets as a Reject exception. Fixes issue2550667.
+  Added regression tests for message/rfc822 attachments with and without
+  configured unpacking (mailgw unpack_rfc822, see Features above)
+  Thanks to Benni Bärmann for reporting.
+- Allow search_popup macro to work with all db classes, issue2550567
+  (thanks John Kristensen)
+- lower memory footprint for (journal-) import
+
+
+2010-07-12 1.4.15
+
+Fixed:
+
+- A bunch of regressions were introduced in the last release making Roundup
+  no longer work in Python releases prior to 2.6
+- make URL detection a little smarter about brackets per issue2550657
+  (thanks Ezio Melotti)
+
+
+2010-07-01 1.4.14
+
+Features:
+
+- Preparations for getting 2to3 work, not completed yet. (Richard Jones)
+
+Fixed:
+
+- User input not escaped when a bad template name is supplied (thanks
+  Benjamin Pollack)
+- The email for the first message on an issue was having its In-Reply-To
+  set to itself (thanks Eric Kow)
+- Handle multiple @action values from broken trackers.
+- Accept single-character subject lines
+- xmlrpc handling of unicode characters and binary values, see
+  http://thread.gmane.org/gmane.comp.bug-tracking.roundup.user/10040
+  thanks to Hauke Duden for reporting these.
+- frontends/roundup.cgi got out of sync with the roundup.cgi.Client API
+- Default to "text/plain" if no Content-Type header is present in email
+  (thanks Hauke Duden)
+- Small documentation update regarding debugging aids (Bernhard Reiter)
+- Indexer Xapian, made Xapian 1.2 compatible. Needs at least Xapian 1.0.0 now.
+  (Bernhard Reiter; Thanks to Olly Betts for providing the patch Issue2550647.)
+
+
+2010-02-19 1.4.13
+
+Fixed:
+- Multilink edit fields lose their values (thanks Will Maier)
+
+
+2010-02-09 1.4.12 (r4455)
+
+Features:
+- Support IMAP CRAM-MD5, thanks Jochen Maes
+
+Fixes:
+- Proper handling of 'Create' permissions in both mail gateway (earlier
+  commit r4405 by Richard), web interface, and xmlrpc. This used to
+  check 'Edit' permission previously. See
+  http://thread.gmane.org/gmane.comp.bug-tracking.roundup.devel/5133
+  Add regression tests for proper handling of 'Create' and 'Edit'
+  permissions.
+- Fix handling of non-ascii in realname in the nosy mailer, this used to
+  mangle the email address making it unusable when replying. Thanks to
+  intevation for funding the fix.
+- Fix documentation on user required to run the tests, fixes 
+  issue2550618, thanks to Chris aka 'radioking'
+- Add simple doc about translating customised tracker content
+- Add "flup" setup documentation, thanks Christian Glass
+- Fix "Web Access" permission check to allow serving of static files to
+  Anonymous again
+- Add check for "Web Access" permission in all web templating permission
+  checks
+- Improvements in upgrading documentation, thanks Christian Glass
+- Display 'today' in the account user's timezone, thanks David Wolever
+- Fix file handle leak in some web interfaces with logging turned on,
+  fixes issue1675845
+- Attempt to generate more human-readable addresses in email, fixes
+  issue2550632
+- Allow value to be specified to multilink form element templating, fixes
+  issue2550613, thanks David Wolever
+- Fix thread safety with stdin in roundup-server, fixes issue2550596
+  (thanks Werner Hunger)
+
+
+2009-12-21 1.4.11 (r4413)
+
+Features:
+- Generic class editor may now restore retired items (thanks Ralf Hemmecke)
+
+Fixes:
+- Fix security hole allowing user permission escalation (thanks Ralf
+  Schlatterbeck)
+- More SSL fixes. SSL wants the underlying socket non-blocking. So we
+  don't call socket.setdefaulttimeout in case of SSL. This apparently
+  never raises a WantReadError from SSL.
+  This also fixes a case where a WantReadError is raised and apparently
+  the bytes already read are dropped (seems the WantReadError is really
+  an error, not just an indication to retry).
+- Correct initial- and end-handshakes for SSL
+- Update FAQ to mention infinite redirects with pathological settings of
+  the tracker->web variable. Closes issue2537286, thanks to "stuidge"
+  for reporting.
+- Fix some format errors in italian translation file
+- Some bugs issue classifiers were causing database lookup errors
+- Fix security-problem: If user hasn't permission on a message (notably
+  files and content properties) and is on the nosy list, the content was
+  sent via email. We now check that user has permission on the message
+  content and files properties. Thanks to Intevation for funding this
+  fix.
+- Fix traceback on .../msgN/ url, this requests the file content and for
+  apache mod_wsgi produced a traceback because the mime type is None for
+  messages, fixes issue2550586, thanks to Thomas Arendsen Hein for
+  reporting and to Intevation for funding the fix.
+- Handle OPTIONS http request method in wsgi handler, fixes issue2550587.
+  Thanks to Thomas Arendsen Hein for reporting and to Intevation for
+  funding the fix.
+- Add documentation for migrating to the Register permission and
+  fix mailgw to use Register permission, fixes issue2550599
+- Fix styling of calendar to make it more usable, fixes issue2550608
+- Fix typo in email section of user guide, fixes issue2550607
+- Fix WSGI response code (thanks Peter Pöml)
+- Fix linking of an existing item to a newly created item, e.g.
+  edit action in web template is name="issue-1 at link@msg" value="msg1"
+  would trigger a traceback about an unbound variable.
+  Add new regression test for this case. May be related to (now closed)
+  issue1177477. Thanks to Intevation for funding the fix.
+- Clean up all the places where role processing occurs. This is now in a
+  central place in hyperdb.Class and is used consistently throughout.
+  This also means now a template can override the way role processing
+  occurs (e.g. for elaborate permission schemes). Thanks to intevation
+  for funding the change.
+- Fix issue2550606 (german translation bug) "an hour" is only used in
+  the context "in an hour" or "an hour ago" which translates to german
+  "in einer Stunde" or "vor einer Stunde".  So "an hour" is translated
+  "einer Stunde" (which sounds wrong at first).  Also note that date.py
+  already has a comment saying "XXX this is internationally broken" --
+  but at least there's a workaround for german :-) Thanks to Chris
+  (radioking) for reporting.
+
 
 2009-10-09 1.4.10 (r4374)
 
 Fixes:
 - Minor update of doc/developers.txt to point to the new resources
   on www.roundup-tracker.org (Bernhard Reiter)
-- Small CSS improvements regaring the search box (thanks Thomas Arendsan Hein)
+- Small CSS improvements regarding the search box (thanks Thomas Arendsen Hein)
   (issue 2550589)
 - Indexers behaviour made more consistent regarding length of indexed words
   and stopwords (thanks Thomas Arendsen Hein, Bernhard Reiter)(issue 2550584)
@@ -16,12 +369,13 @@
   for export/import) has a new field size limit starting with python2.5.
   We now issue a warning during export if the limit is too small and use
   the csv_field_size configuration during import to set the limit for
-  the csv module.
+  the csv module. (Ralf Schlatterbeck)
 - Small fix for CGI-handling of XMLRPC requests for python2.4, this
   worked only for 2.5 and beyond due to a change in the xmlrpc interface
-  in python
-- Document filter method of xmlrpc interface
-- Fix interaction of SSL and XMLRPC, now XMLRPC works with SSL
+  in python (Ralf Schlatterbeck)
+- Document filter method of xmlrpc interface (Ralf Schlatterbeck)
+- Fix interaction of SSL and XMLRPC, now XMLRPC works with SSL 
+  (Ralf Schlatterbeck)
 
 2009-08-10 1.4.9 (r4346)
 

Modified: tracker/roundup-src/demo.py
==============================================================================
--- tracker/roundup-src/demo.py	(original)
+++ tracker/roundup-src/demo.py	Thu Aug  4 15:46:52 2011
@@ -115,7 +115,7 @@
 run demo on a server host, please stop the demo, open file
 "demo/config.ini" with your editor, change the host name in the "web"
 option in section "[tracker]", save the file, then re-run the demo
-program.
+program. If you want to change backend types, you must use "nuke".
 
 ''' % url
 

Modified: tracker/roundup-src/doc/FAQ.txt
==============================================================================
--- tracker/roundup-src/doc/FAQ.txt	(original)
+++ tracker/roundup-src/doc/FAQ.txt	Thu Aug  4 15:46:52 2011
@@ -190,6 +190,12 @@
 Make sure that the ``tracker`` -> ``web`` setting in your tracker's
 config.ini is set to the URL of the tracker.
 
+I'm getting infinite redirects in the browser
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+A wrong value for the ``tracker`` -> ``web`` setting may also result in
+infinite redirects, see http://issues.roundup-tracker.org/issue2537286
+
 
 How is sorting performed, and why does it seem to fail sometimes?
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Modified: tracker/roundup-src/doc/Makefile
==============================================================================
--- tracker/roundup-src/doc/Makefile	(original)
+++ tracker/roundup-src/doc/Makefile	Thu Aug  4 15:46:52 2011
@@ -1,4 +1,4 @@
-STXTOHTML = rst2html
+STXTOHTML = rst2html.py
 STXTOHT = rst2ht.py
 WEBDIR = ../../htdocs/htdocs/doc-1.0
 

Modified: tracker/roundup-src/doc/acknowledgements.txt
==============================================================================
--- tracker/roundup-src/doc/acknowledgements.txt	(original)
+++ tracker/roundup-src/doc/acknowledgements.txt	Thu Aug  4 15:46:52 2011
@@ -6,12 +6,16 @@
 
 Thanks also to the many people on the mailing list, in the sourceforge
 project and those who just report bugs:
+Christian Aastorp,
 Thomas Arendsen Hein,
 Nerijus Baliunas,
+Benni Bärmann,
 Anthony Baxter,
 Marlon van den Berg,
 Bo Berglund,
+Olly Betts,
 Stéphane Bidoul,
+Rafal Bisingier,
 Cameron Blackwood,
 Jeff Blaine,
 Duncan Booth,
@@ -21,6 +25,7 @@
 Steve Byan,
 Brett Cannon,
 Godefroid Chapelle,
+Eli Collins,
 Roch'e Compaan,
 Wil Cooley,
 Joe Cooper,
@@ -29,6 +34,7 @@
 Bradley Dean,
 Toby Dickenson,
 Paul F. Dubois,
+Hauke Duden,
 Eric Earnst,
 Peter Eisentraut,
 Andrew Eland,
@@ -40,8 +46,10 @@
 Stuart D. Gathman,
 Martin Geisler,
 Ajit George,
+Dirk Geschke,
 Frank Gibbons,
 Johannes Gijsbers,
+Christian Glass,
 Gus Gollings,
 Philipp Gortan,
 Dan Grassi,
@@ -51,11 +59,15 @@
 Engelbert Gruber,
 Bruce Guenter,
 Tamás Gulácsi,
+Satchidanand Haridas,
+Sebastian Harl,
+Ralf Hemmecke,
 Juergen Hermann,
 Tobias Herp,
 Uwe Hoffmann,
 Alex Holkner,
 Tobias Hunger,
+Werner Hunger,
 Simon Hyde,
 Paul Jimenez,
 Christophe Kalt,
@@ -66,29 +78,38 @@
 Michael Klatt,
 Bastian Kleineidam,
 Axel Kollmorgen,
-Cedric Krier,
+Cédric Krier,
+John Kristensen,
 Detlef Lannert,
 Andrey Lebedev,
 Henrik Levkowetz,
 David Linke,
 Martin v. Löwis,
 Fredrik Lundh,
+Jochen Maes,
 Will Maier,
 Ksenia Marasanova,
 Georges Martin,
 Gordon McMillan,
+Christof Meerwald,
 John F Meinel Jr,
 Roland Meister,
+Ezio Melotti,
 Ulrik Mikaelsson,
 John Mitchell,
 Ramiro Morales,
 Toni Mueller,
+Joseph Myers,
 Stefan Niederhauser,
 Truls E. Næss,
+Bryce L Nordgren,
 Patrick Ohly,
 Luke Opperman,
 Eddie Parker,
 Will Partain,
+Timo Paulssen,
+Benjamin Pollack,
+Peter Pöml,
 Ewout Prangsma,
 Marcus Priesch,
 Bernhard Reiter,
@@ -114,10 +135,12 @@
 Nathaniel Smith,
 Leonardo Soto,
 Maciej Starzyk,
+Kai Storbeck,
 Mitchell Surface,
 Anatoly T.,
 Jon C. Thomason
 Mike Thompson,
+Hubert Touvet,
 Michael Twomey,
 Joseph E. Trent,
 Karl Ulbrich,
@@ -125,8 +148,10 @@
 Darryl VanDorp,
 J Vickroy,
 Timothy J. Warren,
+Jakub Wilk,
 William (Wilk),
 Tue Wennerberg,
 Matt Wilbert,
 Chris Withers,
+David Wolever,
 Milan Zamazal.

Modified: tracker/roundup-src/doc/admin_guide.txt
==============================================================================
--- tracker/roundup-src/doc/admin_guide.txt	(original)
+++ tracker/roundup-src/doc/admin_guide.txt	Thu Aug  4 15:46:52 2011
@@ -74,7 +74,7 @@
 
     [main]
     port = 8080
-    ;hostname =
+    ;host =
     ;user =
     ;group =
     ;log_ip = yes
@@ -93,9 +93,13 @@
 
 **port**
   Defines the local TCP port to listen for clients on.
-**hostname**
-  Defines the local hostname to listen for clients on. Only required if
-  "localhost" is not sufficient.
+**host**
+  Defines the hostname or IP number to listen for clients on. Only
+  required if `localhost` is not sufficient. If left empty (as opposed
+  to no `host` keyword in the config-file) this will listen to all
+  network interfaces and is equivalent to an explicit address `0.0.0.0`.
+  The use of an empty string to listen to all interfaces is deprecated
+  and will go away in a future version.
 **user** and **group**
   Defines the Unix user and group to run the server as. Only work if the
   server is started as root.

Modified: tracker/roundup-src/doc/announcement.txt
==============================================================================
--- tracker/roundup-src/doc/announcement.txt	(original)
+++ tracker/roundup-src/doc/announcement.txt	Thu Aug  4 15:46:52 2011
@@ -1,28 +1,57 @@
-I'm proud to release version 1.4.10 of Roundup which fixes some bugs:
+I'm proud to release version 1.4.19 of Roundup which introduces some
+minor features and, as usual, fixes some bugs:
 
-- Minor update of doc/developers.txt to point to the new resources
-  on www.roundup-tracker.org (Bernhard Reiter)
-- Small CSS improvements regaring the search box (thanks Thomas Arendsan Hein)
-  (issue 2550589)
-- Indexers behaviour made more consistent regarding length of indexed words
-  and stopwords (thanks Thomas Arendsen Hein, Bernhard Reiter)(issue 2550584)
-- fixed typos in the installation instructions (thanks Thomas Arendsen Hein)
-  (issue 2550573) 
-- New config option csv_field_size: Pythons csv module (which is used
-  for export/import) has a new field size limit starting with python2.5.
-  We now issue a warning during export if the limit is too small and use
-  the csv_field_size configuration during import to set the limit for
-  the csv module.
-- Small fix for CGI-handling of XMLRPC requests for python2.4, this
-  worked only for 2.5 and beyond due to a change in the xmlrpc interface
-  in python
-- Document filter method of xmlrpc interface
-- Fix interaction of SSL and XMLRPC, now XMLRPC works with SSL
+Features:
+
+- Xapian indexing improved: Slightly faster and slightly smaller database. 
+  Closes issue2550687. Thanks to Olly Betts for the patch. (Bernhard Reiter)
+- PostgreSQL backend minor improvement: database creation less likely to fail
+  for PostgreSQL versions >= 8.1 as the table "postgres" is used by default.
+  Closes issue2550543. Thanks to Kai Storbeck for the patch. (Bernhard Reiter)
+- Allow HTMLRequest.batch to filter on other permissions than "View"
+  (e.g. on the new "Search" permission") by adding a "permission"
+  parameter. Thanks to Eli Collins for the patch. Closes issue2550699. (Ralf)
+
+Fixed:
+
+- Installation: Fixed an issue that prevented to use EasyInstall 
+  and a Python egg. Thanks to Satchidanand Haridas for the patch and
+  John Kristensen for testing it. (Bernhard Reiter)
+- The PostgreSQL backend quotes database names now for CREATE and DROP, 
+  enabling more exotic tracker names. Closes issue2550497. 
+  Thanks to Sebastian Harl for providing the patch. (Bernhard Reiter)
+- Updated the url to point to www.roundup-tracker.org in two places in the
+  docs. (Bernhard Reiter)
+- Do not depend on a CPython implementation detail anymore to make Roundup 
+  more compatible with other Python implementations like PyPy.
+  Closes issue2550707. Thanks to Christof Meerwald. (Bernhard Reiter, Richard)
+- Yet another fix to the mail gateway, messages got *all* files of
+  an issue, not just the new ones. Thanks to Rafal Bisingier for
+  reporting and proposing a fix. The regression test was updated.
+  (Ralf)
+- Fix version numbers in upgrade documentation, the file-unlink defect
+  was in 1.4.17 not 1.4.16. Thanks to Rafal Bisingier. (Ralf)
+- Fix encoded email header parsing if multiple encoded and non-encoded
+  parts are present. RFC2047 specifies that spacing is removed only
+  between encoded parts, we always removed the space. Note that this bug
+  was present before mail gateway refactoring :-) Thanks for thorough
+  testing of mail gateway code by Rafal Bisingier. (Ralf)
+- The "Retire" permission was not being registered. (Richard)
+- Fix StringIO issue2550713: io.StringIO in newer versions of python
+  returns unicode strings and expects a unicode string in the
+  constructor. Unfortunately csv doesn't handle unicode (yet). So we
+  need to use a BytesIO which gets the utf-8 string from the
+  web-interface. Compatibility for old versions by using
+  StringIO.StringIO for emulating a io.BytesIO also works.
+  Thanks to Cedric Krier for reporting. Closes issue2550713.
+  Added a regression test for EditCSVAction (Ralf)
+- Fix issue2550691 where a Unix From-Header was sometimes inserted in
+  outgoing emails, thanks to Joseph Myers for the patch. (Ralf)
 
 If you're upgrading from an older version of Roundup you *must* follow
 the "Software Upgrade" guidelines given in the maintenance documentation.
 
-Roundup requires python 2.3 or later (but not 3+) for correct operation.
+Roundup requires python 2.4 or later (but not 3+) for correct operation.
 
 To give Roundup a try, just download (see below), unpack and run::
 
@@ -31,7 +60,7 @@
 Release info and download page:
      http://cheeseshop.python.org/pypi/roundup
 Source and documentation is available at the website:
-     http://roundup.sourceforge.net/
+     http://roundup-tracker.org/
 Mailing lists - the place to ask questions:
      http://sourceforge.net/mail/?group_id=31577
 
@@ -56,7 +85,7 @@
 The system will facilitate communication among the participants by managing
 discussions and notifying interested parties when issues are edited. One of
 the major design goals for Roundup that it be simple to get going. Roundup
-is therefore usable "out of the box" with any python 2.3+ (but not 3+)
+is therefore usable "out of the box" with any python 2.4+ (but not 3+)
 installation. It doesn't even need to be "installed" to be operational,
 though an install script is provided.
 

Modified: tracker/roundup-src/doc/customizing.txt
==============================================================================
--- tracker/roundup-src/doc/customizing.txt	(original)
+++ tracker/roundup-src/doc/customizing.txt	Thu Aug  4 15:46:52 2011
@@ -362,6 +362,11 @@
   an issue for the interval after the issue's creation or last activity.
   The interval is a standard Roundup interval.
 
+ subject_updates_title -- ``yes``
+  Update issue title if incoming subject of email is different.
+  Setting this to ``no`` will ignore the title part of
+  the subject of incoming email messages.
+
  refwd_re -- ``(\s*\W?\s*(fw|fwd|re|aw|sv|ang)\W)+``
   Regular expression matching a single reply or forward prefix
   prepended by the mailer. This is explicitly stripped from the
@@ -384,6 +389,13 @@
   Regular expression matching a blank line.  Value is Python Regular
   Expression (UTF8-encoded).
 
+ ignore_alternatives -- ``no``
+  When parsing incoming mails, roundup uses the first
+  text/plain part it finds. If this part is inside a
+  multipart/alternative, and this option is set, all other
+  parts of the multipart/alternative are ignored. The default
+  is to keep all parts and attach them to the issue.
+
 Section **pgp**
  OpenPGP mail processing options
 
@@ -496,7 +508,7 @@
   them.
 **init**
   This function is responsible for setting up the initial state of your
-  tracker. It's called exactly once - but the ``roundup-admin initialise``
+  tracker. It's called exactly once - by the ``roundup-admin initialise``
   command.  See the start of the section on `database content`_ for more
   info about how this works.
 
@@ -958,6 +970,7 @@
 - Create (everything)
 - Edit (everything)
 - View (everything)
+- Register (User class only)
 
 These are assigned to the "Admin" Role by default, and allow a user to do
 anything. Every Class you define in your `tracker schema`_ also gets an
@@ -995,7 +1008,7 @@
 And the "Anonymous" Role is defined as:
 
 - Web interface access
-- Create user (for registration)
+- Register user (for registration)
 - View issue, file, msg, query, keyword, priority, status
 
 Put together, these settings appear in the tracker's ``schema.py`` file::
@@ -1166,7 +1179,7 @@
  - they're *anonymous*.
 
 **automatic registration of users in the e-mail gateway**
- By giving the "anonymous" user the ("Create", "user") Permission, any
+ By giving the "anonymous" user the ("Register", "user") Permission, any
  unidentified user will automatically be registered with the tracker
  (with no password, so they won't be able to log in through
  the web until an admin sets their password). By default new Roundup
@@ -1610,7 +1623,7 @@
 **user.register.html**
   a special page just for the user class, that renders the registration
   page
-**style.css.html**
+**style.css**
   a static file that is served up as-is
 
 The *classic* template has a number of additional templates.
@@ -1971,7 +1984,7 @@
 This is implemented by the ``roundup.cgi.templating.HTMLClass``
 class.
 
-This wrapper object provides access to a hyperb class. It is used
+This wrapper object provides access to a hyperdb class. It is used
 primarily in both index view and new item views, but it's also usable
 anywhere else that you wish to access information about a class, or the
 items of a class, when you don't have a specific item of that class in
@@ -2071,7 +2084,7 @@
 This is implemented by the ``roundup.cgi.templating.HTMLItem``
 class.
 
-This wrapper object provides access to a hyperb item.
+This wrapper object provides access to a hyperdb item.
 
 We allow access to properties. There will be no "id" property. The value
 accessed through the property will be the current value of the same name
@@ -2547,6 +2560,15 @@
 the "keyword" class (well, their "name" anyway).
 
 
+Translations
+~~~~~~~~~~~~
+
+Should you wish to enable multiple languages in template content that you
+create you'll need to add new locale files in the tracker home under a
+``locale`` directory. Use the instructions in the ``developer's guide`` to
+create the locale files.
+
+
 Displaying Properties
 ---------------------
 
@@ -4605,6 +4627,22 @@
         db.security.addPermissionToRole('User', 'Create', cl)
 
 
+Moderating user registration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You could set up new-user moderation in a public tracker by:
+
+1. creating a new highly-restricted user role "Pending",
+2. set the config new_web_user_roles and/or new_email_user_roles to that
+   role,
+3. have an auditor that emails you when new users are created with that
+   role using roundup.mailer
+4. edit the role to "User" for valid users.
+
+Some simple javascript might help in the last step. If you have high volume
+you could search for all currently-Pending users and do a bulk edit of all
+their roles at once (again probably with some simple javascript help).
+
 
 Changes to the Web User Interface
 ---------------------------------
@@ -4831,10 +4869,10 @@
             '''
             category = self.form['category'].value
             if category == '-1':
-                self.error_message.append('You must select a category of report')
+                self.client.error_message.append('You must select a category of report')
                 return
             # everything's ok, move on to the next page
-            self.template = 'add_page2'
+            self.client.template = 'add_page2'
 
     def init(instance):
         instance.registerAction('page1_submit', Page1SubmitAction)
@@ -4862,3 +4900,4 @@
 
 
 .. _`design documentation`: design.html
+.. _`developer's guide`: developers.html

Modified: tracker/roundup-src/doc/debugging.txt
==============================================================================
--- tracker/roundup-src/doc/debugging.txt	(original)
+++ tracker/roundup-src/doc/debugging.txt	Thu Aug  4 15:46:52 2011
@@ -1,31 +1,23 @@
-Debugging Flags
----------------
+Debugging Aids
+--------------
 
-Roundup uses a number of debugging environment variables to help you
-figure out what the heck it's doing. 
+Try turning on logging of DEBUG level messages. This may be done a number
+of ways, depending on what it is you're testing:
 
-HYPERDBDEBUG 
-============
+1. If you're testing the database unit tests, then set the environment
+   variable ``LOGGING_LEVEL=DEBUG``. This may be done like so:
 
-This environment variable should be set to a filename - the hyperdb will
-write debugging information for various events (including, for instance,
-the SQL used).
+    LOGGING_LEVEL=DEBUG python run_tests.py
 
-This is only obeyed when python is _not_ running in -O mode. 
+   This variable replaces the older HYPERDBDEBUG environment var.
 
-HYPERDBTRACE
-============
-
-This environment variable should be set to a filename - the hyperdb will
-write a timestamp entry for various events. This appears to be suffering
-rather extreme bit-rot and may go away soon.
-
-This is only obeyed when python is _not_ running in -O mode. 
+2. If you're testing a particular tracker, then set the logging level in
+   your tracker's ``config.ini``.
 
 SENDMAILDEBUG
 =============
 
-Set to a filename and roundup will write a copy of each email message
-that it sends to that file. This environment variable is independent of
-the python -O flag.
+Set to a filename and roundup will write each email message
+that it sends to that file instead of sending it to the internet.
+This environment variable is independent of the python -O flag.
 

Modified: tracker/roundup-src/doc/design.txt
==============================================================================
--- tracker/roundup-src/doc/design.txt	(original)
+++ tracker/roundup-src/doc/design.txt	Thu Aug  4 15:46:52 2011
@@ -1008,7 +1008,7 @@
 Command Interface Specification
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-A single command, roundup, provides basic access to the hyperdatabase
+A single command, ``roundup-admin``, provides basic access to the hyperdatabase
 from the command line::
 
     roundup-admin help
@@ -1039,11 +1039,12 @@
   are both accepted; an empty string, a single item, or a list of items
   joined by commas is accepted.
 
-When multiple items are specified to the roundup get or roundup set
+When multiple items are specified to the roundup-admin get or roundup-admin set
 commands, the specified properties are retrieved or set on all the
 listed items.
 
-When multiple results are returned by the roundup get or roundup find
+When multiple results are returned by the roundup-admin get or
+roundup-admin find
 commands, they are printed one per line (default) or joined by commas
 (with the -list) option.
 
@@ -1055,8 +1056,8 @@
 "spam", for example, you could execute the following command from the
 directory where the database dumps its files::
 
-    shell% for issue in `roundup find issue status=in-progress`; do
-    > grep -l spam `roundup get $issue messages`
+    shell% for issue in `roundup-admin find issue status=in-progress`; do
+    > grep -l spam `roundup-admin get $issue messages`
     > done
     msg23
     msg49
@@ -1066,8 +1067,8 @@
 
 Or, using the -list option, this can be written as a single command::
 
-    shell% grep -l spam `roundup get \
-        \`roundup find -list issue status=in-progress\` messages`
+    shell% grep -l spam `roundup-admin get \
+        \`roundup-admin find -list issue status=in-progress\` messages`
     msg23
     msg49
     msg50
@@ -1156,7 +1157,7 @@
 The e-mail interface also provides a simple way to set properties on
 issues.  At the end of the subject line, ``propname=value`` pairs can be
 specified in square brackets, using the same conventions as for the
-roundup ``set`` shell command.
+roundup-admin ``set`` shell command.
 
 
 Web User Interface

Modified: tracker/roundup-src/doc/developers.txt
==============================================================================
--- tracker/roundup-src/doc/developers.txt	(original)
+++ tracker/roundup-src/doc/developers.txt	Thu Aug  4 15:46:52 2011
@@ -22,14 +22,36 @@
 - The issue tracker running at
   http://issues.roundup-tracker.org/
 
-Website, wiki, issue tracker
-----------------------------
+Website, wiki
+-------------
 
-1. Log into <username>,roundup at shell.sourceforge.net
+1. ssh -t <username>,roundup at shell.sourceforge.net create
 2. cd /home/groups/r/ro/roundup
 3. follow instructions in README.txt
 
 
+Issue Tracker
+-------------
+
+The tracker resides on psf.upfronthosting.co.za. The roundup installation
+belongs to the user roundup. In ~roundup, all trackers are stored and
+the roundup code itself. roundup is started through /etc/init.d/roundup;
+other parts of the installation are started through
+/etc/init.d/{postgresql-8-1,spambayes,postfix}.
+
+The machine is operated by Upfronthosting in South Africa. The meta
+tracker is http://psf.upfronthosting.co.za/roundup/meta/
+In this tracker, Upfronthosting people are the users izak and roche.
+
+The Roundup tracker http://issues.roundup-tracker.org/ is in
+~roundup/trackers/roundup
+
+The configuration is in the "web/trunk/issues" section of Roundup's
+Subversion repository and copied manually to the live tracker.
+
+A checkout of the roundup sources is in ~roundup/src/roundup-src.
+
+
 Small Changes
 -------------
 
@@ -75,19 +97,7 @@
 Debugging Aids
 --------------
 
-Try turning on logging of DEBUG level messages. This may be done a number
-of ways, depending on what it is you're testing:
-
-1. If you're testing the database unit tests, then set the environment
-   variable ``LOGGING_LEVEL=DEBUG``. This may be done like so:
-
-    LOGGING_LEVEL=DEBUG python run_tests.py
-
-   This variable replaces the older HYPERDBDEBUG environment var.
-
-2. If you're testing a particular tracker, then set the logging level in
-   your tracker's ``config.ini``.
-
+See `debugging.txt`_.
 
 Internationalization Notes
 --------------------------

Modified: tracker/roundup-src/doc/index.txt
==============================================================================
--- tracker/roundup-src/doc/index.txt	(original)
+++ tracker/roundup-src/doc/index.txt	Thu Aug  4 15:46:52 2011
@@ -15,6 +15,7 @@
    user_guide
    customizing
    admin_guide
+   xmlrpc
    spec
    original design <design>
    developers

Modified: tracker/roundup-src/doc/installation.txt
==============================================================================
--- tracker/roundup-src/doc/installation.txt	(original)
+++ tracker/roundup-src/doc/installation.txt	Thu Aug  4 15:46:52 2011
@@ -69,9 +69,7 @@
   installed and used. You will need to run the "roundup-admin reindex"
   command if the tracker has existing data.
 
-  Roundup requires Xapian *newer* than 0.9.2 - it may be necessary for
-  you to install a snapshot. Snapshot "0.9.2_svn6532" has been tried
-  successfully.
+  Roundup requires Xapian 1.0.0 or newer.
 
 pyopenssl
   If pyopenssl_ is installed the roundup-server can be configured
@@ -85,7 +83,7 @@
   configured, you can require email to be cryptographically signed
   before roundup will allow it to make modifications to issues.
 
-.. _Xapian: http://www.xapian.org/
+.. _Xapian: http://xapian.org/
 .. _pytz: http://www.python.org/pypi/pytz
 .. _Olson tz database: http://www.twinsun.com/tz/tz-link.htm
 .. _pyopenssl: http://pyopenssl.sourceforge.net
@@ -101,7 +99,7 @@
     and if it runs you may skip the `Basic Installation Steps`_
     below and go straight to `configuring your first tracker`_.
 
-Download the latest version from http://roundup.sf.net/.
+Download the latest version from http://www.roundup-tracker.org/.
 
 If you're using WinZIP's "classic" interface, make sure the "Use
 folder names" check box is checked before you extract the files.
@@ -234,7 +232,7 @@
                  Confirm:
 
       Note: running this command will *destroy any existing data in the
-      database*. In the case of MySQL and PostgreSQL, any exsting database
+      database*. In the case of MySQL and PostgreSQL, any existing database
       will be dropped and re-created.
 
       Once this is done, the tracker has been created.
@@ -326,10 +324,11 @@
 There are five web interfaces to choose from:
 
 1. `web server cgi-bin`_
-2. `stand-alone web server`_
-3. `Zope product - ZRoundup`_
-4. `Apache HTTP Server with mod_python`_
-5. `WSGI handler`_
+2. `cgi-bin for limited-access hosting`_
+3. `stand-alone web server`_
+4. `Zope product - ZRoundup`_
+5. `Apache HTTP Server with mod_python`_
+6. `WSGI handler`_
 
 You may need to give the web server user permission to access the tracker home
 - see the `UNIX environment steps`_ for information. You may also need to
@@ -387,6 +386,48 @@
  </Location>
 
 
+CGI-bin for Limited-Access Hosting
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you are running in a shared-hosting environment or otherwise don't have
+permission to edit the system web server's configuration, but can create a
+``.htaccess`` file then you may be able to use this approach.
+
+1. Install flup_
+2. Create a script ``roundup_stub`` in your server's ``cgi-bin`` directory
+   containing::
+
+    #!/usr/bin/env python
+
+    # if necessary modify the Python path to include the place you
+    # installed Roundup
+    #import sys
+    #sys.path.append('...')
+
+    # cgitb is needed for debugging in browser only
+    #import cgitb
+    #cgitb.enable()
+
+    # obtain the WSGI request dispatcher
+    from roundup.cgi.wsgi_handler import RequestDispatcher
+    tracker_home = '/path/to/tracker/home'
+    app = RequestDispatcher(tracker_home)
+
+    from flup.server.cgi import WSGIServer
+    WSGIServer(app).run()
+
+3. Modify or create the ``.htaccess`` file in the desired (sub-)domain
+   directory to contain::
+
+    RewriteEngine On
+    RewriteBase /
+    RewriteRule ^(.*)$      /cgi-bin/roundup_stub/$1 [L]
+
+Now loading the (sub-)domain in a browser should load the tracker web
+interface. If you get a "500" error then enable the "cgitb" lines in the
+stub to get some debugging information.
+
+
 Stand-alone Web Server
 ~~~~~~~~~~~~~~~~~~~~~~
 
@@ -998,14 +1039,23 @@
    - users of the Windows installer, other binary distributions or
    pre-installed Roundup will need to download the source to use it.
 
+   Remember to have a database user 'rounduptest' prepared (with
+   password 'rounduptest'). This user
+   must have at least the rights to create and drop databases.
+   Documentation: details on `adding MySQL users`_,
+   for PostgreSQL you want to call the ``createuser`` command with the
+   ``-d`` option to allow database creation.
+
 Once you've unpacked roundup's source, run ``python run_tests.py`` in the
 source directory and make sure there are no errors. If there are errors,
 please let us know!
 
 If the above fails, you may be using the wrong version of python. Try
-``python2 run_tests.py``. If that works, you will need to substitute
-``python2`` for ``python`` in all further commands you use in relation to
-Roundup -- from installation and scripts.
+``python2 run_tests.py`` or ``python2.X run_tests.py`` where ``X`` is in
+the set 3,4,5,6 depending on the version(s) of python installed.
+If that works, you will need to substitute ``python2`` or ``python2.X``
+for ``python`` in all further commands you use in relation to Roundup --
+from installation and scripts.
 
 
 .. _`table of contents`: index.html
@@ -1025,7 +1075,10 @@
 .. _External hyperlink targets:
 
 .. _apache: http://httpd.apache.org/
+.. _flup: http://pypi.python.org/pypi/flup
 .. _mod_python: http://www.modpython.org/
 .. _MySQLdb: http://sourceforge.net/projects/mysql-python
 .. _Psycopg: http://initd.org/software/initd/psycopg
 .. _pysqlite: http://pysqlite.org/
+.. _`adding MySQL users`:
+    http://dev.mysql.com/doc/refman/5.1/en/adding-users.html

Modified: tracker/roundup-src/doc/mysql.txt
==============================================================================
--- tracker/roundup-src/doc/mysql.txt	(original)
+++ tracker/roundup-src/doc/mysql.txt	Thu Aug  4 15:46:52 2011
@@ -37,8 +37,8 @@
        FLUSH PRIVILEGES;
 
 2. If your administrator has provided you with database connection info, 
-   you can modify MYSQL_* constants in the file test/test_db.py with 
-   the correct values.
+   see the config values in 'test/db_test_base.py' 
+   about which database connection, name and user will be used.
 
 The MySQL database should not contain any tables. Tests will not 
 drop the database with existing data.

Modified: tracker/roundup-src/doc/postgresql.txt
==============================================================================
--- tracker/roundup-src/doc/postgresql.txt	(original)
+++ tracker/roundup-src/doc/postgresql.txt	Thu Aug  4 15:46:52 2011
@@ -19,20 +19,14 @@
 
    It is recommended that you use at least version 1.1.21
 
-Some advice on setting up the postgresql backend may be found at:
-
-  http://www.magma.com.ni/wiki/index.cgi?TipsRoundupPostgres
-
 
 Running the PostgreSQL unit tests
 =================================
 
 The user that you're running the tests as will need to be able to access
 the postgresql database on the local machine and create and drop
-databases. Edit the ``test/test_postgresql.py`` database connection info if
-you wish to test against a different database.
-
-The test database will be called "rounduptest".
+databases. See the config values in 'test/db_test_base.py' 
+about which database connection, name and user will be used.
 
 
 Credit

Modified: tracker/roundup-src/doc/upgrading.txt
==============================================================================
--- tracker/roundup-src/doc/upgrading.txt	(original)
+++ tracker/roundup-src/doc/upgrading.txt	Thu Aug  4 15:46:52 2011
@@ -13,6 +13,212 @@
 
 .. contents::
 
+Migrating from 1.4.17 to 1.4.18
+===============================
+
+There was a bug in 1.4.17 where files were unlinked from issues if a
+mail without attachment was received via the mail interface. The
+following script will list likely issues being affected by the bug.
+The date in the script is the date of the 1.4.17 release. If you
+installed 1.4.17 after this date, you can change the date in the
+script to your installation date. Run the script in the directory
+of your tracker.
+
+#!/usr/bin/python
+import os
+from roundup import instance
+from roundup.date import Date
+dir     = os.getcwd ()
+tracker = instance.open (dir)
+db      = tracker.open ('admin')
+# you may want to change this to your install date to find fewer candidates
+last_release = Date('2011-05-13')
+affected = {}
+for i in db.issue.getnodeids():
+    for j in db.issue.history(i):
+        if i in affected:
+            break
+        if j[1] < last_release or j[3] != 'set' or 'files' not in j[4]:
+            continue
+        for op, p in j[4]['files']:
+            if op == '-':
+                affected [i] = 1
+                break
+print ', '.join(sorted(affected.iterkeys()))
+
+To find out which files were attached before, you can look in the
+history of the affected issue.  For fixing issues you can re-attach the
+files in question using the "set" command of roundup-admin, e.g., if the
+list of files attached to an issue should be files 5, 17, 23 for issue42
+you will set this using
+
+roundup-admin -i /path/to/your/tracker set issue42 files=5,17,23
+
+Migrating from 1.4.x to 1.4.17
+==============================
+
+There is a new config-option `migrate_passwords` in section `web` to
+auto-migrate passwords at web-login time to a more secure storage
+scheme. The default for the new option is "yes"; if you don't want
+passwords to be auto-migrated to a more secure password scheme on user
+login, set this to "no" before running your tracker(s) after the
+upgrade.
+
+The standalone roundup-server now defaults to listening on localhost (no
+longer on all network interfaces). This will not affect you if you're
+already using a configuration file for roundup-server. If you are using
+an empty setting for the `host` parameter in the config-file you should
+explicitly put 0.0.0.0 there as the use of an empty string to specify
+listening to all interfaces is deprecated and will go away in a future
+version.  If you are starting the server without a configuration file
+and want to explicitly listen to all network interfaces, you should
+specify the -n option with the address `0.0.0.0`.
+
+Searching now requires either read-permission without a check method, or
+you will have to add a "Search" permission for a class or a list of
+properties for a class (if you want to allow searching). For the classic
+template (or other templates derived from it) you want to add the
+following lines to your `schema.py` file::
+
+  p = db.security.addPermission(name='Search', klass='query')
+  db.security.addPermissionToRole('User', p)
+
+This is needed, because for the `query` class users may view only their
+own queries (or public queries). This is implemented with a `check`
+method, therefore the default search permissions will not allow
+searching and you'll have to add an explicit search permission.
+If you have modified your schema, you can check if you're missing any
+search permissions with the following script, run it in your tracker
+directory, it will list for each Class and Property the roles that may
+search for this property::
+
+    #!/usr/bin/python
+    import os
+    from roundup import instance
+    
+    tracker = instance.open(os.getcwd ())
+    db = tracker.open('admin')
+    
+    for cl in sorted(db.getclasses()):
+        print "Class:", cl
+        for p in sorted(db.getclass(cl).properties.keys()):
+            print "    Property:", p
+            roles = []
+            for role in sorted(db.security.role.iterkeys()):
+                if db.security.roleHasSearchPermission(cl,p,role):
+                    roles.append(role)
+            print "        roles may search:", ', '.join(roles)
+
+
+Migrating from 1.4.x to 1.4.12
+==============================
+
+Item creation now checks the "Create" permission instead of the "Edit"
+permission for individual properties. If you have modified your tracker
+permissions from the default distribution, you should check that
+"Create" permissions exist for all properties you want users to be able
+to create.
+
+
+Fixing some potential security holes
+------------------------------------
+
+Enhanced checking was added to the user registration auditor. If you
+run a public tracker you should update your tracker's
+``detectors/userauditor.py`` using the new code from
+``share/roundup/templates/classic/detectors/userauditor.py``. In most
+cases you may just copy the file over, but if you've made changes to
+the auditor in your tracker then you'll need to manually integrate
+the new code.
+
+Some HTML templates were found to have formatting security problems:
+
+``html/page.html``::
+
+  -tal:replace="request/user/username">username</span></b><br>
+  +tal:replace="python:request.user.username.plain(escape=1)">username</span></b><br>
+
+``html/_generic.help-list.html``::
+
+  -tal:content="structure python:item[prop]"></label>
+  +tal:content="python:item[prop]"></label>
+
+The lines marked "+" should be added and lines marked "-" should be
+deleted (minus the "+"/"-" signs).
+
+
+Some HTML interface tweaks
+--------------------------
+
+You may wish to copy the ``user_utils.js`` and ``style.css`` files from the
+source distribution ``share/roundup/templates/classic/html/`` directory to the
+``html`` directory of your trackers as it includes a small improvement.
+
+If you have made local changes to those files you'll need to manually work
+the differences in to your versions or ignore the changes.
+
+
+Migrating from 1.4.x to 1.4.11
+==============================
+
+Close potential security hole
+-----------------------------
+
+If your tracker has untrusted users you should examine its ``schema.py``
+file and look for the section granting the "Edit" permission to your users.
+This should look something like::
+
+    p = db.security.addPermission(name='Edit', klass='user', check=own_record,
+        description="User is allowed to edit their own user details")
+
+and should be modified to restrict the list of properties they are allowed
+to edit by adding the ``properties=`` section like::
+
+    p = db.security.addPermission(name='Edit', klass='user', check=own_record,
+        properties=('username', 'password', 'address', 'realname', 'phone',
+            'organisation', 'alternate_addresses', 'queries', 'timezone'),
+        description="User is allowed to edit their own user details")
+
+Most importantly the "roles" property should not be editable - thus not
+appear in that list of properties.
+
+
+Grant the "Register" permission to the Anonymous role
+-----------------------------------------------------
+
+A separate "Register" permission has been introduced to allow
+anonymous users to register. This means you will need to add the
+following to your tracker's ``schema.py`` to add the permission and
+assign it to the Anonymous role (replacing any previously assigned
+"Create user" permission for the Anonymous role)::
+
+  +db.security.addPermission(name='Register', klass='user',
+  +     description='User is allowed to register new user')
+ 
+   # Assign the appropriate permissions to the anonymous user's Anonymous
+   # Role. Choices here are:
+   # - Allow anonymous users to register
+  -db.security.addPermissionToRole('Anonymous', 'Create', 'user')
+  +db.security.addPermissionToRole('Anonymous', 'Register', 'user')
+
+The lines marked "+" should be added and lines marked "-" should be
+deleted (minus the "+"/"-" signs).
+
+You should also modify the ``html/page.html`` template to change the
+permission tested there::
+
+   -tal:condition="python:request.user.hasPermission('Create', 'user')"
+   +tal:condition="python:request.user.hasPermission('Register', 'user')"
+
+
+Generic class editor may now restore retired items
+--------------------------------------------------
+
+The instructions for doing so won't be present in your tracker unless you copy
+the ``_generic.index.html`` template from the roundup distribution in
+``share/roundup/templates/classic/html`` to your tracker's ``html`` directory.
+
+
 Migrating from 1.4.x to 1.4.9
 =============================
 
@@ -106,7 +312,7 @@
 Fix the "retire" link in the users list for admin users
 -------------------------------------------------------
 
-The "retire" link found in the file ``html/users.index.html``::
+The "retire" link found in the file ``html/user.index.html``::
 
   <td tal:condition="context/is_edit_ok">
    <a tal:attributes="href string:user${user/id}?@action=retire&@template=index"
@@ -210,8 +416,8 @@
 roundup-index for full-text search. We recommend that you create the
 following database indexes on the database by hand::
 
- CREATE INDEX words_by_id ON __words (_textid)
- CREATE UNIQUE INDEX __textids_by_props ON __textids (_class, _itemid, _prop)
+ CREATE INDEX words_by_id ON __words (_textid);
+ CREATE UNIQUE INDEX __textids_by_props ON __textids (_class, _itemid, _prop);
 
 Migrating from 1.2.x to 1.3.0
 =============================
@@ -1216,8 +1422,8 @@
 and modify it according to your local schema changes.
 
 If you need help with the new templating system, please ask questions on the
-roundup-users mailing list (available through the roundup project page on
-sourceforge, http://roundup.sf.net/)
+roundup-users mailing list (available through the roundup web page on
+sourceforge, http://www.roundup-tracker.org/).
 
 
 0.5.0 Detectors

Modified: tracker/roundup-src/doc/user_guide.txt
==============================================================================
--- tracker/roundup-src/doc/user_guide.txt	(original)
+++ tracker/roundup-src/doc/user_guide.txt	Thu Aug  4 15:46:52 2011
@@ -440,7 +440,7 @@
 
 - setting the priority of an issue::
 
-   Subject: Re: [issue1] the coffee machine is broken! [priority=urgent]
+   Subject: Re: [issue2] the coffee machine is broken! [priority=urgent]
 
 - adding yourself to a nosy list::
 
@@ -616,11 +616,39 @@
  are both valid. The username and/or password will be prompted for if
  not supplied on the command-line.
 
+POPS:
+ Connect to a POP server over ssl. This requires python 2.4 or later.
+ This supports the same notation as POP::
+
+    pops username:password at server
+
 APOP:
  Same as POP, but using Authenticated POP::
 
     apop username:password at server
 
+IMAP:
+ Connect to an IMAP server. This supports the same notation as that of
+ POP mail::
+
+    imap username:password at server
+
+ It also allows you to specify a specific mailbox other than INBOX using
+ this format::
+
+    imap username:password at server mailbox
+
+IMAPS:
+ Connect to an IMAP server over ssl.
+ This supports the same notation as IMAP::
+
+    imaps username:password at server [mailbox]
+
+IMAPS_CRAM:
+ Connect to an IMAP server over ssl using CRAM-MD5 authentication.
+ This supports the same notation as IMAP::
+
+    imaps_cram username:password at server [mailbox]
 
 Command Line Tool
 =================

Modified: tracker/roundup-src/doc/xmlrpc.txt
==============================================================================
--- tracker/roundup-src/doc/xmlrpc.txt	(original)
+++ tracker/roundup-src/doc/xmlrpc.txt	Thu Aug  4 15:46:52 2011
@@ -65,6 +65,12 @@
         ``designator``. The new values are specified in ``arg_1`` through
         ``arg_N``. The arguments are name=value pairs (e.g. ``status='3'``).
 
+lookup  arguments: *classname, key_value*
+
+        looks up the key_value for the given class. The class needs to
+        have a key and the user needs search permission on the key
+        attribute and id for the given classname.
+
 filter  arguments: *classname, list or None, attributes*
         
         list can be None (requires ``allow_none=True`` when
@@ -100,3 +106,5 @@
         []
         >>> roundup_server.filter('user',[],{'username':'adm'})
         []
+        >>> roundup_server.lookup('user','admin')
+        '1'

Modified: tracker/roundup-src/frontends/roundup.cgi
==============================================================================
--- tracker/roundup-src/frontends/roundup.cgi	(original)
+++ tracker/roundup-src/frontends/roundup.cgi	Thu Aug  4 15:46:52 2011
@@ -120,6 +120,7 @@
     '''Used to make the CGI server look like a BaseHTTPRequestHandler
     '''
     def __init__(self, wfile):
+        self.rfile = sys.stdin
         self.wfile = wfile
     def write(self, data):
         self.wfile.write(data)

Modified: tracker/roundup-src/locale/de.po
==============================================================================
--- tracker/roundup-src/locale/de.po	(original)
+++ tracker/roundup-src/locale/de.po	Thu Aug  4 15:46:52 2011
@@ -1788,7 +1788,7 @@
 
 #: ../roundup/date.py:861
 msgid "an hour"
-msgstr "eine Stunde"
+msgstr "einer Stunde"
 
 #: ../roundup/date.py:863
 msgid "1 1/2 hours"

Modified: tracker/roundup-src/locale/it.po
==============================================================================
--- tracker/roundup-src/locale/it.po	(original)
+++ tracker/roundup-src/locale/it.po	Thu Aug  4 15:46:52 2011
@@ -5,7 +5,6 @@
 #
 # roundup.pot revision 1.22
 #
-#, fuzzy
 msgid ""
 msgstr ""
 "Project-Id-Version: roundup cvs\n"
@@ -798,7 +797,7 @@
 #: ../roundup/cgi/actions.py:58
 #, python-format
 msgid "You do not have permission to %(action)s the %(classname)s class."
-msgstr "Non hai i permessi per %{action) la classe %(classname)."
+msgstr "Non hai i permessi per %(action)s la classe %(classname)s."
 
 #: ../roundup/cgi/actions.py:89
 msgid "No type specified"
@@ -811,7 +810,7 @@
 #: ../roundup/cgi/actions.py:97
 #, python-format
 msgid "\"%(input)s\" is not an ID (%(classname)s ID required)"
-msgstr "\"%(input)\" non è un ID (%(ID della %(classname) è obbligatorio"
+msgstr "\"%(input)s\" non è un ID (ID della %(classname)s è obbligatorio)"
 
 #: ../roundup/cgi/actions.py:117
 msgid "You may not retire the admin or anonymous user"
@@ -835,7 +834,7 @@
 #: ../roundup/cgi/actions.py:298
 #, python-format
 msgid "Not enough values on line %(line)s"
-msgstr "Non abbastanza valori alla riga %(line)"
+msgstr "Non abbastanza valori alla riga %(line)s"
 
 #: ../roundup/cgi/actions.py:345
 msgid "Items edited OK"
@@ -859,12 +858,12 @@
 #: ../roundup/cgi/actions.py:452
 #, python-format
 msgid "You do not have permission to edit %(class)s"
-msgstr "Non hai i permessi per modificare i $(class)s"
+msgstr "Non hai i permessi per modificare i %(class)s"
 
 #: ../roundup/cgi/actions.py:464
 #, python-format
 msgid "You do not have permission to create %(class)s"
-msgstr "Non hai il permesso per creare $(class)s"
+msgstr "Non hai il permesso per creare %(class)s"
 
 #: ../roundup/cgi/actions.py:488
 msgid "You do not have permission to edit user roles"
@@ -978,7 +977,7 @@
 #: ../roundup/cgi/cgitb.py:76
 #, python-format
 msgid "A problem occurred in your template \"%s\"."
-msgstr "È occorso un problema nel tuo template"
+msgstr "È occorso un problema nel tuo template \"%s\"."
 
 #: ../roundup/cgi/cgitb.py:84
 #, python-format
@@ -1067,7 +1066,7 @@
 #: ../roundup/cgi/client.py:758
 #, python-format
 msgid "%(starttag)sTime elapsed: %(seconds)fs%(endtag)s\n"
-msgstr "%(starttag)sTempo trascorso: %(seconds)fs%(endtad)s\n"
+msgstr "%(starttag)sTempo trascorso: %(seconds)fs%(endtag)s\n"
 
 #: ../roundup/cgi/client.py:762
 #, python-format
@@ -1298,7 +1297,7 @@
 msgid "%(number)s year"
 msgid_plural "%(number)s years"
 msgstr[0] "%(number)s anno"
-msgstr[1] "%(numeber)s anni"
+msgstr[1] "%(number)s anni"
 
 #: ../roundup/date.py:822
 #, python-format
@@ -1852,13 +1851,13 @@
 #: ../roundup/scripts/roundup_server.py:347
 #, python-format
 msgid "User %(user)s doesn't exist"
-msgstr "L'utente $(user)s non esiste"
+msgstr "L'utente %(user)s non esiste"
 
 #: ../roundup/scripts/roundup_server.py:481
 #, python-format
 msgid "Multiprocess mode \"%s\" is not available, switching to single-process"
 msgstr ""
-"La modalità multiprocesso non è disponibile, viene utilizzata quella a "
+"La modalità multiprocesso \"%s\" non è disponibile, viene utilizzata quella a "
 "singolo processo"
 
 #: ../roundup/scripts/roundup_server.py:504

Modified: tracker/roundup-src/roundup/__init__.py
==============================================================================
--- tracker/roundup-src/roundup/__init__.py	(original)
+++ tracker/roundup-src/roundup/__init__.py	Thu Aug  4 15:46:52 2011
@@ -68,6 +68,6 @@
 '''
 __docformat__ = 'restructuredtext'
 
-__version__ = '1.4.10'
+__version__ = '1.4.19'
 
 # vim: set filetype=python ts=4 sw=4 et si

Modified: tracker/roundup-src/roundup/actions.py
==============================================================================
--- tracker/roundup-src/roundup/actions.py	(original)
+++ tracker/roundup-src/roundup/actions.py	Thu Aug  4 15:46:52 2011
@@ -49,8 +49,8 @@
         # make sure we don't try to retire admin or anonymous
         if (classname == 'user' and
             self.db.user.get(itemid, 'username') in ('admin', 'anonymous')):
-            raise ValueError, self._(
-                'You may not retire the admin or anonymous user')
+            raise ValueError(self._(
+                'You may not retire the admin or anonymous user'))
 
         # do the retire
         self.db.getclass(classname).retire(itemid)

Modified: tracker/roundup-src/roundup/admin.py
==============================================================================
--- tracker/roundup-src/roundup/admin.py	(original)
+++ tracker/roundup-src/roundup/admin.py	Thu Aug  4 15:46:52 2011
@@ -21,7 +21,7 @@
 """
 __docformat__ = 'restructuredtext'
 
-import csv, getopt, getpass, os, re, shutil, sys, UserDict
+import csv, getopt, getpass, os, re, shutil, sys, UserDict, operator
 
 from roundup import date, hyperdb, roundupdb, init, password, token
 from roundup import __version__ as roundup_version
@@ -37,16 +37,15 @@
     """
     _marker = []
     def get(self, key, default=_marker):
-        if self.data.has_key(key):
+        if key in self.data:
             return [(key, self.data[key])]
-        keylist = self.data.keys()
-        keylist.sort()
+        keylist = sorted(self.data)
         l = []
         for ki in keylist:
             if ki.startswith(key):
                 l.append((ki, self.data[ki]))
         if not l and default is self._marker:
-            raise KeyError, key
+            raise KeyError(key)
         return l
 
 class AdminTool:
@@ -63,11 +62,11 @@
     """
     def __init__(self):
         self.commands = CommandDict()
-        for k in AdminTool.__dict__.keys():
+        for k in AdminTool.__dict__:
             if k[:3] == 'do_':
                 self.commands[k[3:]] = getattr(self, k)
         self.help = {}
-        for k in AdminTool.__dict__.keys():
+        for k in AdminTool.__dict__:
             if k[:5] == 'help_':
                 self.help[k[5:]] = getattr(self, k)
         self.tracker_home = ''
@@ -80,7 +79,7 @@
         try:
             return self.db.getclass(classname)
         except KeyError:
-            raise UsageError, _('no such class "%(classname)s"')%locals()
+            raise UsageError(_('no such class "%(classname)s"')%locals())
 
     def props_from_args(self, args):
         """ Produce a dictionary of prop: value from the args list.
@@ -90,12 +89,12 @@
         props = {}
         for arg in args:
             if arg.find('=') == -1:
-                raise UsageError, _('argument "%(arg)s" not propname=value'
-                    )%locals()
+                raise UsageError(_('argument "%(arg)s" not propname=value'
+                    )%locals())
             l = arg.split('=')
             if len(l) < 2:
-                raise UsageError, _('argument "%(arg)s" not propname=value'
-                    )%locals()
+                raise UsageError(_('argument "%(arg)s" not propname=value'
+                    )%locals())
             key, value = l[0], '='.join(l[1:])
             if value:
                 props[key] = value
@@ -137,7 +136,7 @@
         """
         print _('Commands:'),
         commands = ['']
-        for command in self.commands.values():
+        for command in self.commands.itervalues():
             h = _(command.__doc__).split('\n')[0]
             commands.append(' '+h[7:])
         commands.sort()
@@ -150,10 +149,8 @@
     def help_commands_html(self, indent_re=re.compile(r'^(\s+)\S+')):
         """ Produce an HTML command list.
         """
-        commands = self.commands.values()
-        def sortfun(a, b):
-            return cmp(a.__name__, b.__name__)
-        commands.sort(sortfun)
+        commands = sorted(self.commands.itervalues(),
+            operator.attrgetter('__name__'))
         for command in commands:
             h = _(command.__doc__).split('\n')
             name = command.__name__[3:]
@@ -255,7 +252,7 @@
 
 
         # try help_ methods
-        if self.help.has_key(topic):
+        if topic in self.help:
             self.help[topic]()
             return 0
 
@@ -340,7 +337,7 @@
 
     def help_initopts(self):
         templates = self.listTemplates()
-        print _('Templates:'), ', '.join(templates.keys())
+        print _('Templates:'), ', '.join(templates)
         import roundup.backends
         backends = roundup.backends.list_backends()
         print _('Back ends:'), ', '.join(backends)
@@ -369,19 +366,19 @@
         See also initopts help.
         """
         if len(args) < 1:
-            raise UsageError, _('Not enough arguments supplied')
+            raise UsageError(_('Not enough arguments supplied'))
 
         # make sure the tracker home can be created
         tracker_home = os.path.abspath(tracker_home)
         parent = os.path.split(tracker_home)[0]
         if not os.path.exists(parent):
-            raise UsageError, _('Instance home parent directory "%(parent)s"'
-                ' does not exist')%locals()
+            raise UsageError(_('Instance home parent directory "%(parent)s"'
+                ' does not exist')%locals())
 
         config_ini_file = os.path.join(tracker_home, CoreConfig.INI_FILE)
         # check for both old- and new-style configs
-        if filter(os.path.exists, [config_ini_file,
-                os.path.join(tracker_home, 'config.py')]):
+        if list(filter(os.path.exists, [config_ini_file,
+                os.path.join(tracker_home, 'config.py')])):
             ok = raw_input(_(
 """WARNING: There appears to be a tracker in "%(tracker_home)s"!
 If you re-install it, you will lose all the data!
@@ -395,9 +392,9 @@
         # select template
         templates = self.listTemplates()
         template = len(args) > 1 and args[1] or ''
-        if not templates.has_key(template):
-            print _('Templates:'), ', '.join(templates.keys())
-        while not templates.has_key(template):
+        if template not in templates:
+            print _('Templates:'), ', '.join(templates)
+        while template not in templates:
             template = raw_input(_('Select template [classic]: ')).strip()
             if not template:
                 template = 'classic'
@@ -439,8 +436,8 @@
         need_set = CoreConfig(tracker_home)._get_unset_options()
         if need_set:
             print _(" ... at a minimum, you must set following options:")
-            for section, options in need_set.items():
-                print "   [%s]: %s" % (section, ", ".join(options))
+            for section in need_set:
+                print "   [%s]: %s" % (section, ", ".join(need_set[section]))
 
         # note about schema modifications
         print _("""
@@ -466,7 +463,7 @@
         in <filename>.
         """
         if len(args) < 1:
-            raise UsageError, _('Not enough arguments supplied')
+            raise UsageError(_('Not enough arguments supplied'))
         config = CoreConfig()
         config.save(args[0])
 
@@ -490,11 +487,11 @@
 
         # make sure the tracker home is installed
         if not os.path.exists(tracker_home):
-            raise UsageError, _('Instance home does not exist')%locals()
+            raise UsageError(_('Instance home does not exist')%locals())
         try:
             tracker = roundup.instance.open(tracker_home)
         except roundup.instance.TrackerError:
-            raise UsageError, _('Instance has not been installed')%locals()
+            raise UsageError(_('Instance has not been installed')%locals())
 
         # is there already a database?
         if tracker.exists():
@@ -511,10 +508,10 @@
             tracker.nuke()
 
             # re-write the backend select file
-            init.write_select_db(tracker_home, backend)
+            init.write_select_db(tracker_home, backend, tracker.config.DATABASE)
 
         # GO
-        tracker.init(password.Password(adminpw))
+        tracker.init(password.Password(adminpw, config=tracker.config))
 
         return 0
 
@@ -523,11 +520,14 @@
         ''"""Usage: get property designator[,designator]*
         Get the given property of one or more designator(s).
 
+        A designator is a classname and a nodeid concatenated,
+        eg. bug1, user10, ...
+
         Retrieves the property value of the nodes specified
         by the designators.
         """
         if len(args) < 2:
-            raise UsageError, _('Not enough arguments supplied')
+            raise UsageError(_('Not enough arguments supplied'))
         propname = args[0]
         designators = args[1].split(',')
         l = []
@@ -536,7 +536,7 @@
             try:
                 classname, nodeid = hyperdb.splitDesignator(designator)
             except hyperdb.DesignatorError, message:
-                raise UsageError, message
+                raise UsageError(message)
 
             # get the class
             cl = self.get_class(classname)
@@ -560,7 +560,9 @@
                         property = properties[propname]
                         if not (isinstance(property, hyperdb.Multilink) or
                           isinstance(property, hyperdb.Link)):
-                            raise UsageError, _('property %s is not of type Multilink or Link so -d flag does not apply.')%propname
+                            raise UsageError(_('property %s is not of type'
+                                ' Multilink or Link so -d flag does not '
+                                'apply.')%propname)
                         propclassname = self.db.getclass(property.classname).classname
                         id = cl.get(nodeid, propname)
                         for i in id:
@@ -575,7 +577,9 @@
                         property = properties[propname]
                         if not (isinstance(property, hyperdb.Multilink) or
                           isinstance(property, hyperdb.Link)):
-                            raise UsageError, _('property %s is not of type Multilink or Link so -d flag does not apply.')%propname
+                            raise UsageError(_('property %s is not of type'
+                                ' Multilink or Link so -d flag does not '
+                                'apply.')%propname)
                         propclassname = self.db.getclass(property.classname).classname
                         id = cl.get(nodeid, propname)
                         for i in id:
@@ -583,10 +587,11 @@
                     else:
                         print cl.get(nodeid, propname)
             except IndexError:
-                raise UsageError, _('no such %(classname)s node "%(nodeid)s"')%locals()
+                raise UsageError(_('no such %(classname)s node '
+                    '"%(nodeid)s"')%locals())
             except KeyError:
-                raise UsageError, _('no such %(classname)s property '
-                    '"%(propname)s"')%locals()
+                raise UsageError(_('no such %(classname)s property '
+                    '"%(propname)s"')%locals())
         if self.separator:
             print self.separator.join(l)
 
@@ -600,13 +605,16 @@
         The items are specified as a class or as a comma-separated
         list of item designators (ie "designator[,designator,...]").
 
+        A designator is a classname and a nodeid concatenated,
+        eg. bug1, user10, ...
+
         This command sets the properties to the values for all designators
         given. If the value is missing (ie. "property=") then the property
         is un-set. If the property is a multilink, you specify the linked
         ids for the multilink as comma-separated numbers (ie "1,2,3").
         """
         if len(args) < 2:
-            raise UsageError, _('Not enough arguments supplied')
+            raise UsageError(_('Not enough arguments supplied'))
         from roundup import hyperdb
 
         designators = args[0].split(',')
@@ -622,7 +630,7 @@
             try:
                 designators = [hyperdb.splitDesignator(x) for x in designators]
             except hyperdb.DesignatorError, message:
-                raise UsageError, message
+                raise UsageError(message)
 
         # get the props from the args
         props = self.props_from_args(args[1:])
@@ -637,14 +645,14 @@
                     props[key] = hyperdb.rawToHyperdb(self.db, cl, itemid,
                         key, value)
                 except hyperdb.HyperdbValueError, message:
-                    raise UsageError, message
+                    raise UsageError(message)
 
             # try the set
             try:
-                apply(cl.set, (itemid, ), props)
+                cl.set(itemid, **props)
             except (TypeError, IndexError, ValueError), message:
                 import traceback; traceback.print_exc()
-                raise UsageError, message
+                raise UsageError(message)
         self.db_uncommitted = True
         return 0
 
@@ -657,7 +665,7 @@
         value.
         """
         if len(args) < 1:
-            raise UsageError, _('Not enough arguments supplied')
+            raise UsageError(_('Not enough arguments supplied'))
         classname = args[0]
         # get the class
         cl = self.get_class(classname)
@@ -666,7 +674,7 @@
         props = self.props_from_args(args[1:])
 
         # convert the user-input value to a value used for find()
-        for propname, value in props.items():
+        for propname, value in props.iteritems():
             if ',' in value:
                 values = value.split(',')
             else:
@@ -686,26 +694,26 @@
             designator = []
             if self.separator:
                 if self.print_designator:
-                    id=apply(cl.find, (), props)
+                    id = cl.find(**props)
                     for i in id:
                         designator.append(classname + i)
                     print self.separator.join(designator)
                 else:
-                    print self.separator.join(apply(cl.find, (), props))
+                    print self.separator.join(cl.find(**props))
 
             else:
                 if self.print_designator:
-                    id=apply(cl.find, (), props)
+                    id = cl.find(**props)
                     for i in id:
                         designator.append(classname + i)
                     print designator
                 else:
-                    print apply(cl.find, (), props)
+                    print cl.find(**props)
         except KeyError:
-            raise UsageError, _('%(classname)s has no property '
-                '"%(propname)s"')%locals()
+            raise UsageError(_('%(classname)s has no property '
+                '"%(propname)s"')%locals())
         except (ValueError, TypeError), message:
-            raise UsageError, message
+            raise UsageError(message)
         return 0
 
     def do_specification(self, args):
@@ -715,14 +723,15 @@
         This lists the properties for a given class.
         """
         if len(args) < 1:
-            raise UsageError, _('Not enough arguments supplied')
+            raise UsageError(_('Not enough arguments supplied'))
         classname = args[0]
         # get the class
         cl = self.get_class(classname)
 
         # get the key property
         keyprop = cl.getkey()
-        for key, value in cl.properties.items():
+        for key in cl.properties:
+            value = cl.properties[key]
             if keyprop == key:
                 print _('%(key)s: %(value)s (key property)')%locals()
             else:
@@ -732,25 +741,27 @@
         ''"""Usage: display designator[,designator]*
         Show the property values for the given node(s).
 
+        A designator is a classname and a nodeid concatenated,
+        eg. bug1, user10, ...
+
         This lists the properties and their associated values for the given
         node.
         """
         if len(args) < 1:
-            raise UsageError, _('Not enough arguments supplied')
+            raise UsageError(_('Not enough arguments supplied'))
 
         # decode the node designator
         for designator in args[0].split(','):
             try:
                 classname, nodeid = hyperdb.splitDesignator(designator)
             except hyperdb.DesignatorError, message:
-                raise UsageError, message
+                raise UsageError(message)
 
             # get the class
             cl = self.get_class(classname)
 
             # display the values
-            keys = cl.properties.keys()
-            keys.sort()
+            keys = sorted(cl.properties)
             for key in keys:
                 value = cl.get(nodeid, key)
                 print _('%(key)s: %(value)s')%locals()
@@ -764,7 +775,7 @@
         command.
         """
         if len(args) < 1:
-            raise UsageError, _('Not enough arguments supplied')
+            raise UsageError(_('Not enough arguments supplied'))
         from roundup import hyperdb
 
         classname = args[0]
@@ -777,8 +788,9 @@
         properties = cl.getprops(protected = 0)
         if len(args) == 1:
             # ask for the properties
-            for key, value in properties.items():
+            for key in properties:
                 if key == 'id': continue
+                value = properties[key]
                 name = value.__class__.__name__
                 if isinstance(value , hyperdb.Password):
                     again = None
@@ -799,24 +811,24 @@
             props = self.props_from_args(args[1:])
 
         # convert types
-        for propname, value in props.items():
+        for propname in props:
             try:
                 props[propname] = hyperdb.rawToHyperdb(self.db, cl, None,
-                    propname, value)
+                    propname, props[propname])
             except hyperdb.HyperdbValueError, message:
-                raise UsageError, message
+                raise UsageError(message)
 
         # check for the key property
         propname = cl.getkey()
-        if propname and not props.has_key(propname):
-            raise UsageError, _('you must provide the "%(propname)s" '
-                'property.')%locals()
+        if propname and propname not in props:
+            raise UsageError(_('you must provide the "%(propname)s" '
+                'property.')%locals())
 
         # do the actual create
         try:
-            print apply(cl.create, (), props)
+            print cl.create(**props)
         except (TypeError, IndexError, ValueError), message:
-            raise UsageError, message
+            raise UsageError(message)
         self.db_uncommitted = True
         return 0
 
@@ -834,9 +846,9 @@
         for every class instance.
         """
         if len(args) > 2:
-            raise UsageError, _('Too many arguments supplied')
+            raise UsageError(_('Too many arguments supplied'))
         if len(args) < 1:
-            raise UsageError, _('Not enough arguments supplied')
+            raise UsageError(_('Not enough arguments supplied'))
         classname = args[0]
         
         # get the class
@@ -856,8 +868,8 @@
                     try:
                         proplist.append(cl.get(nodeid, propname))
                     except KeyError:
-                        raise UsageError, _('%(classname)s has no property '
-                            '"%(propname)s"')%locals()
+                        raise UsageError(_('%(classname)s has no property '
+                            '"%(propname)s"')%locals())
                 print self.separator.join(proplist)
             else:
                 # create a list of index id's since user didn't specify
@@ -868,8 +880,8 @@
                 try:
                     value = cl.get(nodeid, propname)
                 except KeyError:
-                    raise UsageError, _('%(classname)s has no property '
-                        '"%(propname)s"')%locals()
+                    raise UsageError(_('%(classname)s has no property '
+                        '"%(propname)s"')%locals())
                 print _('%(nodeid)4s: %(value)s')%locals()
         return 0
 
@@ -903,7 +915,7 @@
         will result in a the 4 character wide "Name" column.
         """
         if len(args) < 1:
-            raise UsageError, _('Not enough arguments supplied')
+            raise UsageError(_('Not enough arguments supplied'))
         classname = args[0]
 
         # get the class
@@ -918,14 +930,15 @@
                     try:
                         propname, width = spec.split(':')
                     except (ValueError, TypeError):
-                        raise UsageError, _('"%(spec)s" not name:width')%locals()
+                        raise UsageError(_('"%(spec)s" not '
+                            'name:width')%locals())
                 else:
                     propname = spec
-                if not all_props.has_key(propname):
-                    raise UsageError, _('%(classname)s has no property '
-                        '"%(propname)s"')%locals()
+                if propname not in all_props:
+                    raise UsageError(_('%(classname)s has no property '
+                        '"%(propname)s"')%locals())
         else:
-            prop_names = cl.getprops().keys()
+            prop_names = cl.getprops()
 
         # now figure column widths
         props = []
@@ -971,21 +984,25 @@
         ''"""Usage: history designator
         Show the history entries of a designator.
 
+        A designator is a classname and a nodeid concatenated,
+        eg. bug1, user10, ...
+
         Lists the journal entries for the node identified by the designator.
         """
         if len(args) < 1:
-            raise UsageError, _('Not enough arguments supplied')
+            raise UsageError(_('Not enough arguments supplied'))
         try:
             classname, nodeid = hyperdb.splitDesignator(args[0])
         except hyperdb.DesignatorError, message:
-            raise UsageError, message
+            raise UsageError(message)
 
         try:
             print self.db.getclass(classname).history(nodeid)
         except KeyError:
-            raise UsageError, _('no such class "%(classname)s"')%locals()
+            raise UsageError(_('no such class "%(classname)s"')%locals())
         except IndexError:
-            raise UsageError, _('no such %(classname)s node "%(nodeid)s"')%locals()
+            raise UsageError(_('no such %(classname)s node '
+                '"%(nodeid)s"')%locals())
         return 0
 
     def do_commit(self, args):
@@ -1020,23 +1037,27 @@
         ''"""Usage: retire designator[,designator]*
         Retire the node specified by designator.
 
+        A designator is a classname and a nodeid concatenated,
+        eg. bug1, user10, ...
+
         This action indicates that a particular node is not to be retrieved
         by the list or find commands, and its key value may be re-used.
         """
         if len(args) < 1:
-            raise UsageError, _('Not enough arguments supplied')
+            raise UsageError(_('Not enough arguments supplied'))
         designators = args[0].split(',')
         for designator in designators:
             try:
                 classname, nodeid = hyperdb.splitDesignator(designator)
             except hyperdb.DesignatorError, message:
-                raise UsageError, message
+                raise UsageError(message)
             try:
                 self.db.getclass(classname).retire(nodeid)
             except KeyError:
-                raise UsageError, _('no such class "%(classname)s"')%locals()
+                raise UsageError(_('no such class "%(classname)s"')%locals())
             except IndexError:
-                raise UsageError, _('no such %(classname)s node "%(nodeid)s"')%locals()
+                raise UsageError(_('no such %(classname)s node '
+                    '"%(nodeid)s"')%locals())
         self.db_uncommitted = True
         return 0
 
@@ -1044,22 +1065,26 @@
         ''"""Usage: restore designator[,designator]*
         Restore the retired node specified by designator.
 
+        A designator is a classname and a nodeid concatenated,
+        eg. bug1, user10, ...
+
         The given nodes will become available for users again.
         """
         if len(args) < 1:
-            raise UsageError, _('Not enough arguments supplied')
+            raise UsageError(_('Not enough arguments supplied'))
         designators = args[0].split(',')
         for designator in designators:
             try:
                 classname, nodeid = hyperdb.splitDesignator(designator)
             except hyperdb.DesignatorError, message:
-                raise UsageError, message
+                raise UsageError(message)
             try:
                 self.db.getclass(classname).restore(nodeid)
             except KeyError:
-                raise UsageError, _('no such class "%(classname)s"')%locals()
+                raise UsageError(_('no such class "%(classname)s"')%locals())
             except IndexError:
-                raise UsageError, _('no such %(classname)s node "%(nodeid)s"')%locals()
+                raise UsageError(_('no such %(classname)s node '
+                    '"%(nodeid)s"')%locals())
         self.db_uncommitted = True
         return 0
 
@@ -1078,19 +1103,19 @@
         """
         # grab the directory to export to
         if len(args) < 1:
-            raise UsageError, _('Not enough arguments supplied')
+            raise UsageError(_('Not enough arguments supplied'))
 
         dir = args[-1]
 
         # get the list of classes to export
         if len(args) == 2:
             if args[0].startswith('-'):
-                classes = [ c for c in self.db.classes.keys()
+                classes = [ c for c in self.db.classes
                             if not c in args[0][1:].split(',') ]
             else:
                 classes = args[0].split(',')
         else:
-            classes = self.db.classes.keys()
+            classes = self.db.classes
 
         class colon_separated(csv.excel):
             delimiter = ':'
@@ -1148,7 +1173,8 @@
                 sys.stdout.write("\nExporting Journal for %s\n" % classname)
                 sys.stdout.flush()
             journals = csv.writer(jf, colon_separated)
-            map(journals.writerow, cl.export_journals())
+            for row in cl.export_journals():
+                journals.writerow(row)
             jf.close()
         if max_len > self.db.config.CSV_FIELD_SIZE:
             print >> sys.stderr, \
@@ -1191,7 +1217,7 @@
         database (or, tediously, retire all the old data.)
         """
         if len(args) < 1:
-            raise UsageError, _('Not enough arguments supplied')
+            raise UsageError(_('Not enough arguments supplied'))
         from roundup import hyperdb
 
         if hasattr (csv, 'field_size_limit'):
@@ -1232,7 +1258,10 @@
                 if hasattr(cl, 'import_files'):
                     cl.import_files(dir, nodeid)
                 maxid = max(maxid, int(nodeid))
+
+            # (print to sys.stdout here to allow tests to squash it .. ugh)
             print >> sys.stdout
+
             f.close()
 
             # import the journals
@@ -1241,8 +1270,10 @@
             cl.import_journals(reader)
             f.close()
 
-            # set the id counter
+            # (print to sys.stdout here to allow tests to squash it .. ugh)
             print >> sys.stdout, 'setting', classname, maxid+1
+
+            # set the id counter
             self.db.setid(classname, str(maxid+1))
 
         self.db_uncommitted = True
@@ -1266,8 +1297,8 @@
             2001-01-01
 
         """
-        if len(args) <> 1:
-            raise UsageError, _('Not enough arguments supplied')
+        if len(args) != 1:
+            raise UsageError(_('Not enough arguments supplied'))
 
         # are we dealing with a period or a date
         value = args[0]
@@ -1277,7 +1308,7 @@
               """, re.VERBOSE)
         m = date_re.match(value)
         if not m:
-            raise ValueError, _('Invalid format')
+            raise ValueError(_('Invalid format'))
         m = m.groupdict()
         if m['period']:
             pack_before = date.Date(". - %s"%value)
@@ -1302,8 +1333,8 @@
                     try:
                         cl.index(m.group(2))
                     except IndexError:
-                        raise UsageError, _('no such item "%(designator)s"')%{
-                            'designator': arg}
+                        raise UsageError(_('no such item "%(designator)s"')%{
+                            'designator': arg})
                 else:
                     cl = self.get_class(arg)
                     self.db.reindex(arg)
@@ -1323,7 +1354,7 @@
                 print _('No such Role "%(role)s"')%locals()
                 return 1
         else:
-            roles = self.db.security.role.items()
+            roles = list(self.db.security.role.items())
             role = self.db.config.NEW_WEB_USER_ROLES
             if ',' in role:
                 print _('New Web users get the Roles "%(role)s"')%locals()
@@ -1499,7 +1530,7 @@
         self.tracker_home = os.environ.get('TRACKER_HOME', '')
         # TODO: reinstate the user/password stuff (-u arg too)
         name = password = ''
-        if os.environ.has_key('ROUNDUP_LOGIN'):
+        if 'ROUNDUP_LOGIN' in os.environ:
             l = os.environ['ROUNDUP_LOGIN'].split(':')
             name = l[0]
             if len(l) > 1:

Added: tracker/roundup-src/roundup/anypy/cookie_.py
==============================================================================
--- (empty file)
+++ tracker/roundup-src/roundup/anypy/cookie_.py	Thu Aug  4 15:46:52 2011
@@ -0,0 +1,8 @@
+
+try:
+    from http import cookies as Cookie
+    from http.cookies import CookieError, BaseCookie, SimpleCookie
+    from http.cookies import _getdate as get_cookie_date
+except:
+    from Cookie import CookieError, BaseCookie, SimpleCookie
+    from Cookie import _getdate as get_cookie_date

Added: tracker/roundup-src/roundup/anypy/dbm_.py
==============================================================================
--- (empty file)
+++ tracker/roundup-src/roundup/anypy/dbm_.py	Thu Aug  4 15:46:52 2011
@@ -0,0 +1,20 @@
+# In Python 3 the "anydbm" module was renamed to be "dbm" which is now a
+# package containing the various implementations. The "whichdb" module's
+# whichdb() function was moved to the new "dbm" module.
+
+import sys
+if sys.version_info[:2] < (2, 6):
+    def key_in(db, key):
+        return db.has_key(key)
+else:
+    def key_in(db, key):
+        return key in db
+
+try:
+    # old school first because <3 had a "dbm" module too...
+    import anydbm
+    from whichdb import whichdb
+except ImportError:
+    # python 3+
+    import dbm as anydbm
+    whichdb = anydbm.whichdb

Added: tracker/roundup-src/roundup/anypy/email_.py
==============================================================================
--- (empty file)
+++ tracker/roundup-src/roundup/anypy/email_.py	Thu Aug  4 15:46:52 2011
@@ -0,0 +1,19 @@
+try:
+    # Python 2.5+
+    from email.parser import FeedParser
+except ImportError:
+    # Python 2.4
+    try :
+        from email.Parser import FeedParser
+    except ImportError:
+        from email.Parser import Parser
+        class FeedParser:
+            def __init__(self):
+                self.content = []
+
+            def feed(self, s):
+                self.content.append(s)
+
+            def close(self):
+                p = Parser()
+                return p.parsestr(''.join(self.content))

Modified: tracker/roundup-src/roundup/anypy/hashlib_.py
==============================================================================
--- tracker/roundup-src/roundup/anypy/hashlib_.py	(original)
+++ tracker/roundup-src/roundup/anypy/hashlib_.py	Thu Aug  4 15:46:52 2011
@@ -4,8 +4,10 @@
 
 try:
     from hashlib import md5, sha1 # new in Python 2.5
+    shamodule = sha1
 except ImportError:
     from md5 import md5           # deprecated in Python 2.6
     from sha import sha as sha1   # deprecated in Python 2.6
+    import sha as shamodule
 
 # vim: ts=8 sts=4 sw=4 si

Added: tracker/roundup-src/roundup/anypy/http_.py
==============================================================================
--- (empty file)
+++ tracker/roundup-src/roundup/anypy/http_.py	Thu Aug  4 15:46:52 2011
@@ -0,0 +1,5 @@
+try:
+    from http import client
+except:
+    import httplib as client
+

Added: tracker/roundup-src/roundup/anypy/io_.py
==============================================================================
--- (empty file)
+++ tracker/roundup-src/roundup/anypy/io_.py	Thu Aug  4 15:46:52 2011
@@ -0,0 +1,7 @@
+
+try:
+    from io import StringIO, BytesIO
+except:
+    from StringIO import StringIO
+    BytesIO = StringIO
+

Modified: tracker/roundup-src/roundup/anypy/sets_.py
==============================================================================
--- tracker/roundup-src/roundup/anypy/sets_.py	(original)
+++ tracker/roundup-src/roundup/anypy/sets_.py	Thu Aug  4 15:46:52 2011
@@ -24,7 +24,7 @@
 
 try:
     set = set                     # built-in since Python 2.4
-except NameError, TypeError:
+except (NameError, TypeError):
     from sets import Set as set   # deprecated as of Python 2.6
 
 # vim: ts=8 sts=4 sw=4 si et

Added: tracker/roundup-src/roundup/anypy/urllib_.py
==============================================================================
--- (empty file)
+++ tracker/roundup-src/roundup/anypy/urllib_.py	Thu Aug  4 15:46:52 2011
@@ -0,0 +1,6 @@
+
+try:
+    from urllib.parse import quote, urlparse
+except:
+    from urllib import quote
+    from urlparse import urlparse

Modified: tracker/roundup-src/roundup/backends/__init__.py
==============================================================================
--- tracker/roundup-src/roundup/backends/__init__.py	(original)
+++ tracker/roundup-src/roundup/backends/__init__.py	Thu Aug  4 15:46:52 2011
@@ -38,7 +38,7 @@
     '''Get a specific backend by name.'''
     vars = globals()
     # if requested backend has been imported yet, return current instance
-    if vars.has_key(name):
+    if name in vars:
         return vars[name]
     # import the backend module
     module_name = 'back_%s' % name

Modified: tracker/roundup-src/roundup/backends/back_anydbm.py
==============================================================================
--- tracker/roundup-src/roundup/backends/back_anydbm.py	(original)
+++ tracker/roundup-src/roundup/backends/back_anydbm.py	Thu Aug  4 15:46:52 2011
@@ -22,30 +22,22 @@
 """
 __docformat__ = 'restructuredtext'
 
-try:
-    import anydbm, sys
-    # dumbdbm only works in python 2.1.2+
-    if sys.version_info < (2,1,2):
-        import dumbdbm
-        assert anydbm._defaultmod != dumbdbm
-        del dumbdbm
-except AssertionError:
-    print "WARNING: you should upgrade to python 2.1.3"
+import os, marshal, re, weakref, string, copy, time, shutil, logging
 
-import whichdb, os, marshal, re, weakref, string, copy, time, shutil, logging
+from roundup.anypy.dbm_ import anydbm, whichdb, key_in
 
 from roundup import hyperdb, date, password, roundupdb, security, support
 from roundup.support import reversed
 from roundup.backends import locking
 from roundup.i18n import _
 
-from blobfiles import FileStorage
-from sessions_dbm import Sessions, OneTimeKeys
+from roundup.backends.blobfiles import FileStorage
+from roundup.backends.sessions_dbm import Sessions, OneTimeKeys
 
 try:
-    from indexer_xapian import Indexer
+    from roundup.backends.indexer_xapian import Indexer
 except ImportError:
-    from indexer_dbm import Indexer
+    from roundup.backends.indexer_dbm import Indexer
 
 def db_exists(config):
     # check for the user db
@@ -57,6 +49,87 @@
 def db_nuke(config):
     shutil.rmtree(config.DATABASE)
 
+class Binary:
+
+    def __init__(self, x, y):
+        self.x = x
+        self.y = y
+
+    def visit(self, visitor):
+        self.x.visit(visitor)
+        self.y.visit(visitor)
+
+class Unary:
+
+    def __init__(self, x):
+        self.x = x
+
+    def generate(self, atom):
+        return atom(self)
+
+    def visit(self, visitor):
+        self.x.visit(visitor)
+
+class Equals(Unary):
+
+    def evaluate(self, v):
+        return self.x in v
+
+    def visit(self, visitor):
+        visitor(self)
+
+class Not(Unary):
+
+    def evaluate(self, v):
+        return not self.x.evaluate(v)
+
+    def generate(self, atom):
+        return "NOT(%s)" % self.x.generate(atom)
+
+class Or(Binary):
+
+    def evaluate(self, v):
+        return self.x.evaluate(v) or self.y.evaluate(v)
+
+    def generate(self, atom):
+        return "(%s)OR(%s)" % (
+            self.x.generate(atom),
+            self.y.generate(atom))
+
+class And(Binary):
+
+    def evaluate(self, v):
+        return self.x.evaluate(v) and self.y.evaluate(v)
+
+    def generate(self, atom):
+        return "(%s)AND(%s)" % (
+            self.x.generate(atom),
+            self.y.generate(atom))
+
+def compile_expression(opcodes):
+
+    stack = []
+    push, pop = stack.append, stack.pop
+    for opcode in opcodes:
+        if   opcode == -2: push(Not(pop()))
+        elif opcode == -3: push(And(pop(), pop()))
+        elif opcode == -4: push(Or(pop(), pop()))
+        else:              push(Equals(opcode))
+
+    return pop()
+
+class Expression:
+
+    def __init__(self, v):
+        try:
+            opcodes = [int(x) for x in v]
+            if min(opcodes) >= -1: raise ValueError()
+
+            compiled = compile_expression(opcodes)
+            self.evaluate = lambda x: compiled.evaluate([int(y) for y in x])
+        except:
+            self.evaluate = lambda x: bool(set(x) & set(v))
+
 #
 # Now the database
 #
@@ -146,13 +219,13 @@
     #
     def __getattr__(self, classname):
         """A convenient way of calling self.getclass(classname)."""
-        if self.classes.has_key(classname):
+        if classname in self.classes:
             return self.classes[classname]
         raise AttributeError, classname
 
     def addclass(self, cl):
         cn = cl.classname
-        if self.classes.has_key(cn):
+        if cn in self.classes:
             raise ValueError, cn
         self.classes[cn] = cl
 
@@ -163,6 +236,8 @@
             description="User is allowed to edit "+cn)
         self.security.addPermission(name="View", klass=cn,
             description="User is allowed to access "+cn)
+        self.security.addPermission(name="Retire", klass=cn,
+            description="User is allowed to retire "+cn)
 
     def getclasses(self):
         """Return a list of the names of all existing classes."""
@@ -178,7 +253,7 @@
         try:
             return self.classes[classname]
         except KeyError:
-            raise KeyError, 'There is no class called "%s"'%classname
+            raise KeyError('There is no class called "%s"'%classname)
 
     #
     # Class DBs
@@ -186,8 +261,8 @@
     def clear(self):
         """Delete all database contents
         """
-        logging.getLogger('hyperdb').info('clear')
-        for cn in self.classes.keys():
+        logging.getLogger('roundup.hyperdb').info('clear')
+        for cn in self.classes:
             for dummy in 'nodes', 'journals':
                 path = os.path.join(self.dir, 'journals.%s'%cn)
                 if os.path.exists(path):
@@ -212,10 +287,9 @@
         """
         db_type = ''
         if os.path.exists(path):
-            db_type = whichdb.whichdb(path)
+            db_type = whichdb(path)
             if not db_type:
-                raise hyperdb.DatabaseError, \
-                    _("Couldn't identify database type")
+                raise hyperdb.DatabaseError(_("Couldn't identify database type"))
         elif os.path.exists(path+'.db'):
             # if the path ends in '.db', it's a dbm database, whether
             # anydbm says it's dbhash or not!
@@ -231,21 +305,24 @@
         db_type = self.determine_db_type(path)
 
         # new database? let anydbm pick the best dbm
-        if not db_type:
+        # in Python 3+ the "dbm" ("anydbm" to us) module already uses the
+        # whichdb() function to do this
+        if not db_type or hasattr(anydbm, 'whichdb'):
             if __debug__:
-                logging.getLogger('hyperdb').debug("opendb anydbm.open(%r, 'c')"%path)
+                logging.getLogger('roundup.hyperdb').debug(
+                    "opendb anydbm.open(%r, 'c')"%path)
             return anydbm.open(path, 'c')
 
-        # open the database with the correct module
+        # in Python <3 anydbm was a little dumb so we manually open the
+        # database with the correct module
         try:
             dbm = __import__(db_type)
         except ImportError:
-            raise hyperdb.DatabaseError, \
-                _("Couldn't open database - the required module '%s'"\
-                " is not available")%db_type
+            raise hyperdb.DatabaseError(_("Couldn't open database - the "
+                "required module '%s' is not available")%db_type)
         if __debug__:
-            logging.getLogger('hyperdb').debug("opendb %r.open(%r, %r)"%(db_type, path,
-                mode))
+            logging.getLogger('roundup.hyperdb').debug(
+                "opendb %r.open(%r, %r)"%(db_type, path, mode))
         return dbm.open(path, mode)
 
     #
@@ -256,7 +333,7 @@
         """
         # open the ids DB - create if if doesn't exist
         db = self.opendb('_ids', 'c')
-        if db.has_key(classname):
+        if key_in(db, classname):
             newid = db[classname] = str(int(db[classname]) + 1)
         else:
             # the count() bit is transitional - older dbs won't start at 1
@@ -280,7 +357,7 @@
         """ add the specified node to its class's db
         """
         # we'll be supplied these props if we're doing an import
-        if not node.has_key('creator'):
+        if 'creator' not in node:
             # add in the "calculated" properties (dupe so we don't affect
             # calling code's node assumptions)
             node = node.copy()
@@ -305,7 +382,8 @@
         """ perform the saving of data specified by the set/addnode
         """
         if __debug__:
-            logging.getLogger('hyperdb').debug('save %s%s %r'%(classname, nodeid, node))
+            logging.getLogger('roundup.hyperdb').debug(
+                'save %s%s %r'%(classname, nodeid, node))
         self.transactions.append((self.doSaveNode, (classname, nodeid, node)))
 
     def getnode(self, classname, nodeid, db=None, cache=1):
@@ -316,27 +394,29 @@
         """
         # try the cache
         cache_dict = self.cache.setdefault(classname, {})
-        if cache_dict.has_key(nodeid):
+        if nodeid in cache_dict:
             if __debug__:
-                logging.getLogger('hyperdb').debug('get %s%s cached'%(classname, nodeid))
+                logging.getLogger('roundup.hyperdb').debug(
+                    'get %s%s cached'%(classname, nodeid))
                 self.stats['cache_hits'] += 1
             return cache_dict[nodeid]
 
         if __debug__:
             self.stats['cache_misses'] += 1
             start_t = time.time()
-            logging.getLogger('hyperdb').debug('get %s%s'%(classname, nodeid))
+            logging.getLogger('roundup.hyperdb').debug(
+                'get %s%s'%(classname, nodeid))
 
         # get from the database and save in the cache
         if db is None:
             db = self.getclassdb(classname)
-        if not db.has_key(nodeid):
-            raise IndexError, "no such %s %s"%(classname, nodeid)
+        if not key_in(db, nodeid):
+            raise IndexError("no such %s %s"%(classname, nodeid))
 
         # check the uncommitted, destroyed nodes
-        if (self.destroyednodes.has_key(classname) and
-                self.destroyednodes[classname].has_key(nodeid)):
-            raise IndexError, "no such %s %s"%(classname, nodeid)
+        if (classname in self.destroyednodes and
+                nodeid in self.destroyednodes[classname]):
+            raise IndexError("no such %s %s"%(classname, nodeid))
 
         # decode
         res = marshal.loads(db[nodeid])
@@ -357,14 +437,13 @@
         """Remove a node from the database. Called exclusively by the
            destroy() method on Class.
         """
-        logging.getLogger('hyperdb').info('destroy %s%s'%(classname, nodeid))
+        logging.getLogger('roundup.hyperdb').info(
+            'destroy %s%s'%(classname, nodeid))
 
         # remove from cache and newnodes if it's there
-        if (self.cache.has_key(classname) and
-                self.cache[classname].has_key(nodeid)):
+        if (classname in self.cache and nodeid in self.cache[classname]):
             del self.cache[classname][nodeid]
-        if (self.newnodes.has_key(classname) and
-                self.newnodes[classname].has_key(nodeid)):
+        if (classname in self.newnodes and nodeid in self.newnodes[classname]):
             del self.newnodes[classname][nodeid]
 
         # see if there's any obvious commit actions that we should get rid of
@@ -385,13 +464,13 @@
         """
         properties = self.getclass(classname).getprops()
         d = {}
-        for k, v in node.items():
+        for k, v in node.iteritems():
             if k == self.RETIRED_FLAG:
                 d[k] = v
                 continue
 
             # if the property doesn't exist then we really don't care
-            if not properties.has_key(k):
+            if k not in properties:
                 continue
 
             # get the property spec
@@ -412,10 +491,10 @@
         """
         properties = self.getclass(classname).getprops()
         d = {}
-        for k, v in node.items():
+        for k, v in node.iteritems():
             # if the property doesn't exist, or is the "retired" flag then
             # it won't be in the properties dict
-            if not properties.has_key(k):
+            if k not in properties:
                 d[k] = v
                 continue
 
@@ -427,9 +506,7 @@
             elif isinstance(prop, hyperdb.Interval) and v is not None:
                 d[k] = date.Interval(v)
             elif isinstance(prop, hyperdb.Password) and v is not None:
-                p = password.Password()
-                p.unpack(v)
-                d[k] = p
+                d[k] = password.Password(encrypted=v)
             else:
                 d[k] = v
         return d
@@ -439,29 +516,27 @@
         """
         # try the cache
         cache = self.cache.setdefault(classname, {})
-        if cache.has_key(nodeid):
+        if nodeid in cache:
             return 1
 
         # not in the cache - check the database
         if db is None:
             db = self.getclassdb(classname)
-        res = db.has_key(nodeid)
-        return res
+        return key_in(db, nodeid)
 
     def countnodes(self, classname, db=None):
         count = 0
 
         # include the uncommitted nodes
-        if self.newnodes.has_key(classname):
+        if classname in self.newnodes:
             count += len(self.newnodes[classname])
-        if self.destroyednodes.has_key(classname):
+        if classname in self.destroyednodes:
             count -= len(self.destroyednodes[classname])
 
         # and count those in the DB
         if db is None:
             db = self.getclassdb(classname)
-        count = count + len(db.keys())
-        return count
+        return count + len(db)
 
 
     #
@@ -484,7 +559,8 @@
             the current user.
         """
         if __debug__:
-            logging.getLogger('hyperdb').debug('addjournal %s%s %s %r %s %r'%(classname,
+            logging.getLogger('roundup.hyperdb').debug(
+                'addjournal %s%s %s %r %s %r'%(classname,
                 nodeid, action, params, creator, creation))
         if creator is None:
             creator = self.getuid()
@@ -494,8 +570,8 @@
     def setjournal(self, classname, nodeid, journal):
         """Set the journal to the "journal" list."""
         if __debug__:
-            logging.getLogger('hyperdb').debug('setjournal %s%s %r'%(classname,
-                nodeid, journal))
+            logging.getLogger('roundup.hyperdb').debug(
+                'setjournal %s%s %r'%(classname, nodeid, journal))
         self.transactions.append((self.doSetJournal, (classname, nodeid,
             journal)))
 
@@ -529,14 +605,14 @@
             db = self.opendb('journals.%s'%classname, 'r')
         except anydbm.error, error:
             if str(error) == "need 'c' or 'n' flag to open new db":
-                raise IndexError, 'no such %s %s'%(classname, nodeid)
+                raise IndexError('no such %s %s'%(classname, nodeid))
             elif error.args[0] != 2:
                 # this isn't a "not found" error, be alarmed!
                 raise
             if res:
                 # we have unsaved journal entries, return them
                 return res
-            raise IndexError, 'no such %s %s'%(classname, nodeid)
+            raise IndexError('no such %s %s'%(classname, nodeid))
         try:
             journal = marshal.loads(db[nodeid])
         except KeyError:
@@ -544,7 +620,7 @@
             if res:
                 # we have some unsaved journal entries, be happy!
                 return res
-            raise IndexError, 'no such %s %s'%(classname, nodeid)
+            raise IndexError('no such %s %s'%(classname, nodeid))
         db.close()
 
         # add all the saved journal entries for this node
@@ -581,8 +657,8 @@
                         packed += 1
                 db[key] = marshal.dumps(l)
 
-                logging.getLogger('hyperdb').info('packed %d %s items'%(packed,
-                    classname))
+                logging.getLogger('roundup.hyperdb').info(
+                    'packed %d %s items'%(packed, classname))
 
             if db_type == 'gdbm':
                 db.reorganize()
@@ -604,7 +680,7 @@
 
         The only backend this seems to affect is postgres.
         """
-        logging.getLogger('hyperdb').info('commit %s transactions'%(
+        logging.getLogger('roundup.hyperdb').info('commit %s transactions'%(
             len(self.transactions)))
 
         # keep a handle to all the database files opened
@@ -617,7 +693,7 @@
                 reindex[method(*args)] = 1
         finally:
             # make sure we close all the database files
-            for db in self.databases.values():
+            for db in self.databases.itervalues():
                 db.close()
             del self.databases
 
@@ -627,7 +703,7 @@
         self.transactions = []
 
         # reindex the nodes that request it
-        for classname, nodeid in filter(None, reindex.keys()):
+        for classname, nodeid in [k for k in reindex if k]:
             self.getclass(classname).index(nodeid)
 
         # save the indexer state
@@ -648,7 +724,7 @@
         """
         # get the database handle
         db_name = 'nodes.%s'%classname
-        if not self.databases.has_key(db_name):
+        if db_name not in self.databases:
             self.databases[db_name] = self.getclassdb(classname, 'c')
         return self.databases[db_name]
 
@@ -666,7 +742,7 @@
         """
         # get the database handle
         db_name = 'journals.%s'%classname
-        if not self.databases.has_key(db_name):
+        if db_name not in self.databases:
             self.databases[db_name] = self.opendb(db_name, 'c')
         return self.databases[db_name]
 
@@ -691,7 +767,7 @@
         db = self.getCachedJournalDB(classname)
 
         # now insert the journal entry
-        if db.has_key(nodeid):
+        if key_in(db, nodeid):
             # append to existing
             s = db[nodeid]
             l = marshal.loads(s)
@@ -716,18 +792,18 @@
     def doDestroyNode(self, classname, nodeid):
         # delete from the class database
         db = self.getCachedClassDB(classname)
-        if db.has_key(nodeid):
+        if key_in(db, nodeid):
             del db[nodeid]
 
         # delete from the database
         db = self.getCachedJournalDB(classname)
-        if db.has_key(nodeid):
+        if key_in(db, nodeid):
             del db[nodeid]
 
     def rollback(self):
         """ Reverse all actions from the current transaction.
         """
-        logging.getLogger('hyperdb').info('rollback %s transactions'%(
+        logging.getLogger('roundup.hyperdb').info('rollback %s transactions'%(
             len(self.transactions)))
 
         for method, args in self.transactions:
@@ -784,6 +860,8 @@
         These operations trigger detectors and can be vetoed.  Attempts
         to modify the "creation" or "activity" properties cause a KeyError.
         """
+        if self.db.journaltag is None:
+            raise hyperdb.DatabaseError(_('Database open read-only'))
         self.fireAuditors('create', None, propvalues)
         newid = self.create_inner(**propvalues)
         self.fireReactors('create', newid, None)
@@ -792,48 +870,49 @@
     def create_inner(self, **propvalues):
         """ Called by create, in-between the audit and react calls.
         """
-        if propvalues.has_key('id'):
-            raise KeyError, '"id" is reserved'
+        if 'id' in propvalues:
+            raise KeyError('"id" is reserved')
 
         if self.db.journaltag is None:
-            raise hyperdb.DatabaseError, _('Database open read-only')
+            raise hyperdb.DatabaseError(_('Database open read-only'))
 
-        if propvalues.has_key('creation') or propvalues.has_key('activity'):
-            raise KeyError, '"creation" and "activity" are reserved'
+        if 'creation' in propvalues or 'activity' in propvalues:
+            raise KeyError('"creation" and "activity" are reserved')
         # new node's id
         newid = self.db.newid(self.classname)
 
         # validate propvalues
         num_re = re.compile('^\d+$')
-        for key, value in propvalues.items():
+        for key, value in propvalues.iteritems():
             if key == self.key:
                 try:
                     self.lookup(value)
                 except KeyError:
                     pass
                 else:
-                    raise ValueError, 'node with key "%s" exists'%value
+                    raise ValueError('node with key "%s" exists'%value)
 
             # try to handle this property
             try:
                 prop = self.properties[key]
             except KeyError:
-                raise KeyError, '"%s" has no property "%s"'%(self.classname,
-                    key)
+                raise KeyError('"%s" has no property "%s"'%(self.classname,
+                    key))
 
             if value is not None and isinstance(prop, hyperdb.Link):
                 if type(value) != type(''):
-                    raise ValueError, 'link value must be String'
+                    raise ValueError('link value must be String')
                 link_class = self.properties[key].classname
                 # if it isn't a number, it's a key
                 if not num_re.match(value):
                     try:
                         value = self.db.classes[link_class].lookup(value)
                     except (TypeError, KeyError):
-                        raise IndexError, 'new property "%s": %s not a %s'%(
-                            key, value, link_class)
+                        raise IndexError('new property "%s": %s not a %s'%(
+                            key, value, link_class))
                 elif not self.db.getclass(link_class).hasnode(value):
-                    raise IndexError, '%s has no node %s'%(link_class, value)
+                    raise IndexError('%s has no node %s'%(link_class,
+                        value))
 
                 # save off the value
                 propvalues[key] = value
@@ -847,22 +926,22 @@
                 if value is None:
                     value = []
                 if not hasattr(value, '__iter__'):
-                    raise TypeError, 'new property "%s" not an iterable of ids'%key
+                    raise TypeError('new property "%s" not an iterable of ids'%key)
 
                 # clean up and validate the list of links
                 link_class = self.properties[key].classname
                 l = []
                 for entry in value:
                     if type(entry) != type(''):
-                        raise ValueError, '"%s" multilink value (%r) '\
-                            'must contain Strings'%(key, value)
+                        raise ValueError('"%s" multilink value (%r) '\
+                            'must contain Strings'%(key, value))
                     # if it isn't a number, it's a key
                     if not num_re.match(entry):
                         try:
                             entry = self.db.classes[link_class].lookup(entry)
                         except (TypeError, KeyError):
-                            raise IndexError, 'new property "%s": %s not a %s'%(
-                                key, entry, self.properties[key].classname)
+                            raise IndexError('new property "%s": %s not a %s'%(
+                                key, entry, self.properties[key].classname))
                     l.append(entry)
                 value = l
                 propvalues[key] = value
@@ -870,8 +949,8 @@
                 # handle additions
                 for nodeid in value:
                     if not self.db.getclass(link_class).hasnode(nodeid):
-                        raise IndexError, '%s has no node %s'%(link_class,
-                            nodeid)
+                        raise IndexError('%s has no node %s'%(link_class,
+                            nodeid))
                     # register the link with the newly linked node
                     if self.do_journal and self.properties[key].do_journal:
                         self.db.addjournal(link_class, nodeid, 'link',
@@ -879,41 +958,41 @@
 
             elif isinstance(prop, hyperdb.String):
                 if type(value) != type('') and type(value) != type(u''):
-                    raise TypeError, 'new property "%s" not a string'%key
+                    raise TypeError('new property "%s" not a string'%key)
                 if prop.indexme:
                     self.db.indexer.add_text((self.classname, newid, key),
                         value)
 
             elif isinstance(prop, hyperdb.Password):
                 if not isinstance(value, password.Password):
-                    raise TypeError, 'new property "%s" not a Password'%key
+                    raise TypeError('new property "%s" not a Password'%key)
 
             elif isinstance(prop, hyperdb.Date):
                 if value is not None and not isinstance(value, date.Date):
-                    raise TypeError, 'new property "%s" not a Date'%key
+                    raise TypeError('new property "%s" not a Date'%key)
 
             elif isinstance(prop, hyperdb.Interval):
                 if value is not None and not isinstance(value, date.Interval):
-                    raise TypeError, 'new property "%s" not an Interval'%key
+                    raise TypeError('new property "%s" not an Interval'%key)
 
             elif value is not None and isinstance(prop, hyperdb.Number):
                 try:
                     float(value)
                 except ValueError:
-                    raise TypeError, 'new property "%s" not numeric'%key
+                    raise TypeError('new property "%s" not numeric'%key)
 
             elif value is not None and isinstance(prop, hyperdb.Boolean):
                 try:
                     int(value)
                 except ValueError:
-                    raise TypeError, 'new property "%s" not boolean'%key
+                    raise TypeError('new property "%s" not boolean'%key)
 
         # make sure there's data where there needs to be
-        for key, prop in self.properties.items():
-            if propvalues.has_key(key):
+        for key, prop in self.properties.iteritems():
+            if key in propvalues:
                 continue
             if key == self.key:
-                raise ValueError, 'key property "%s" is required'%key
+                raise ValueError('key property "%s" is required'%key)
             if isinstance(prop, hyperdb.Multilink):
                 propvalues[key] = []
 
@@ -944,21 +1023,21 @@
 
         # check for one of the special props
         if propname == 'creation':
-            if d.has_key('creation'):
+            if 'creation' in d:
                 return d['creation']
             if not self.do_journal:
-                raise ValueError, 'Journalling is disabled for this class'
+                raise ValueError('Journalling is disabled for this class')
             journal = self.db.getjournal(self.classname, nodeid)
             if journal:
-                return self.db.getjournal(self.classname, nodeid)[0][1]
+                return journal[0][1]
             else:
                 # on the strange chance that there's no journal
                 return date.Date()
         if propname == 'activity':
-            if d.has_key('activity'):
+            if 'activity' in d:
                 return d['activity']
             if not self.do_journal:
-                raise ValueError, 'Journalling is disabled for this class'
+                raise ValueError('Journalling is disabled for this class')
             journal = self.db.getjournal(self.classname, nodeid)
             if journal:
                 return self.db.getjournal(self.classname, nodeid)[-1][1]
@@ -966,10 +1045,10 @@
                 # on the strange chance that there's no journal
                 return date.Date()
         if propname == 'creator':
-            if d.has_key('creator'):
+            if 'creator' in d:
                 return d['creator']
             if not self.do_journal:
-                raise ValueError, 'Journalling is disabled for this class'
+                raise ValueError('Journalling is disabled for this class')
             journal = self.db.getjournal(self.classname, nodeid)
             if journal:
                 num_re = re.compile('^\d+$')
@@ -986,10 +1065,10 @@
             else:
                 return self.db.getuid()
         if propname == 'actor':
-            if d.has_key('actor'):
+            if 'actor' in d:
                 return d['actor']
             if not self.do_journal:
-                raise ValueError, 'Journalling is disabled for this class'
+                raise ValueError('Journalling is disabled for this class')
             journal = self.db.getjournal(self.classname, nodeid)
             if journal:
                 num_re = re.compile('^\d+$')
@@ -1009,7 +1088,7 @@
         # get the property (raises KeyErorr if invalid)
         prop = self.properties[propname]
 
-        if not d.has_key(propname):
+        if propname not in d:
             if default is _marker:
                 if isinstance(prop, hyperdb.Multilink):
                     return []
@@ -1045,10 +1124,13 @@
         These operations trigger detectors and can be vetoed.  Attempts
         to modify the "creation" or "activity" properties cause a KeyError.
         """
+        if self.db.journaltag is None:
+            raise hyperdb.DatabaseError(_('Database open read-only'))
+
         self.fireAuditors('set', nodeid, propvalues)
         oldvalues = copy.deepcopy(self.db.getnode(self.classname, nodeid))
-        for name,prop in self.getprops(protected=0).items():
-            if oldvalues.has_key(name):
+        for name, prop in self.getprops(protected=0).iteritems():
+            if name in oldvalues:
                 continue
             if isinstance(prop, hyperdb.Multilink):
                 oldvalues[name] = []
@@ -1064,24 +1146,25 @@
         if not propvalues:
             return propvalues
 
-        if propvalues.has_key('creation') or propvalues.has_key('activity'):
+        if 'creation' in propvalues or 'activity' in propvalues:
             raise KeyError, '"creation" and "activity" are reserved'
 
-        if propvalues.has_key('id'):
+        if 'id' in propvalues:
             raise KeyError, '"id" is reserved'
 
         if self.db.journaltag is None:
-            raise hyperdb.DatabaseError, _('Database open read-only')
+            raise hyperdb.DatabaseError(_('Database open read-only'))
 
         node = self.db.getnode(self.classname, nodeid)
-        if node.has_key(self.db.RETIRED_FLAG):
+        if self.db.RETIRED_FLAG in node:
             raise IndexError
         num_re = re.compile('^\d+$')
 
         # if the journal value is to be different, store it in here
         journalvalues = {}
 
-        for propname, value in propvalues.items():
+        # list() propvalues 'cos it might be modified by the loop
+        for propname, value in list(propvalues.items()):
             # check to make sure we're not duplicating an existing key
             if propname == self.key and node[propname] != value:
                 try:
@@ -1089,7 +1172,7 @@
                 except KeyError:
                     pass
                 else:
-                    raise ValueError, 'node with key "%s" exists'%value
+                    raise ValueError('node with key "%s" exists'%value)
 
             # this will raise the KeyError if the property isn't valid
             # ... we don't use getprops() here because we only care about
@@ -1097,8 +1180,8 @@
             try:
                 prop = self.properties[propname]
             except KeyError:
-                raise KeyError, '"%s" has no property named "%s"'%(
-                    self.classname, propname)
+                raise KeyError('"%s" has no property named "%s"'%(
+                    self.classname, propname))
 
             # if the value's the same as the existing value, no sense in
             # doing anything
@@ -1113,22 +1196,23 @@
                 link_class = prop.classname
                 # if it isn't a number, it's a key
                 if value is not None and not isinstance(value, type('')):
-                    raise ValueError, 'property "%s" link value be a string'%(
-                        propname)
+                    raise ValueError('property "%s" link value be a string'%(
+                        propname))
                 if isinstance(value, type('')) and not num_re.match(value):
                     try:
                         value = self.db.classes[link_class].lookup(value)
                     except (TypeError, KeyError):
-                        raise IndexError, 'new property "%s": %s not a %s'%(
-                            propname, value, prop.classname)
+                        raise IndexError('new property "%s": %s not a %s'%(
+                            propname, value, prop.classname))
 
                 if (value is not None and
                         not self.db.getclass(link_class).hasnode(value)):
-                    raise IndexError, '%s has no node %s'%(link_class, value)
+                    raise IndexError('%s has no node %s'%(link_class,
+                        value))
 
                 if self.do_journal and prop.do_journal:
                     # register the unlink with the old linked node
-                    if node.has_key(propname) and node[propname] is not None:
+                    if propname in node and node[propname] is not None:
                         self.db.addjournal(link_class, node[propname], 'unlink',
                             (self.classname, nodeid, propname))
 
@@ -1141,22 +1225,22 @@
                 if value is None:
                     value = []
                 if not hasattr(value, '__iter__'):
-                    raise TypeError, 'new property "%s" not an iterable of'\
-                        ' ids'%propname
+                    raise TypeError('new property "%s" not an iterable of'
+                        ' ids'%propname)
                 link_class = self.properties[propname].classname
                 l = []
                 for entry in value:
                     # if it isn't a number, it's a key
                     if type(entry) != type(''):
-                        raise ValueError, 'new property "%s" link value ' \
-                            'must be a string'%propname
+                        raise ValueError('new property "%s" link value '
+                            'must be a string'%propname)
                     if not num_re.match(entry):
                         try:
                             entry = self.db.classes[link_class].lookup(entry)
                         except (TypeError, KeyError):
-                            raise IndexError, 'new property "%s": %s not a %s'%(
+                            raise IndexError('new property "%s": %s not a %s'%(
                                 propname, entry,
-                                self.properties[propname].classname)
+                                self.properties[propname].classname))
                     l.append(entry)
                 value = l
                 propvalues[propname] = value
@@ -1166,7 +1250,7 @@
                 remove = []
 
                 # handle removals
-                if node.has_key(propname):
+                if propname in node:
                     l = node[propname]
                 else:
                     l = []
@@ -1183,7 +1267,8 @@
                 # handle additions
                 for id in value:
                     if not self.db.getclass(link_class).hasnode(id):
-                        raise IndexError, '%s has no node %s'%(link_class, id)
+                        raise IndexError('%s has no node %s'%(link_class,
+                            id))
                     if id in l:
                         continue
                     # register the link with the newly linked node
@@ -1204,38 +1289,45 @@
 
             elif isinstance(prop, hyperdb.String):
                 if value is not None and type(value) != type('') and type(value) != type(u''):
-                    raise TypeError, 'new property "%s" not a string'%propname
+                    raise TypeError('new property "%s" not a '
+                        'string'%propname)
                 if prop.indexme:
                     self.db.indexer.add_text((self.classname, nodeid, propname),
                         value)
 
             elif isinstance(prop, hyperdb.Password):
                 if not isinstance(value, password.Password):
-                    raise TypeError, 'new property "%s" not a Password'%propname
+                    raise TypeError('new property "%s" not a '
+                        'Password'%propname)
                 propvalues[propname] = value
+                journalvalues[propname] = \
+                    current and password.JournalPassword(current)
 
             elif value is not None and isinstance(prop, hyperdb.Date):
                 if not isinstance(value, date.Date):
-                    raise TypeError, 'new property "%s" not a Date'% propname
+                    raise TypeError('new property "%s" not a '
+                        'Date'%propname)
                 propvalues[propname] = value
 
             elif value is not None and isinstance(prop, hyperdb.Interval):
                 if not isinstance(value, date.Interval):
-                    raise TypeError, 'new property "%s" not an '\
-                        'Interval'%propname
+                    raise TypeError('new property "%s" not an '
+                        'Interval'%propname)
                 propvalues[propname] = value
 
             elif value is not None and isinstance(prop, hyperdb.Number):
                 try:
                     float(value)
                 except ValueError:
-                    raise TypeError, 'new property "%s" not numeric'%propname
+                    raise TypeError('new property "%s" not '
+                        'numeric'%propname)
 
             elif value is not None and isinstance(prop, hyperdb.Boolean):
                 try:
                     int(value)
                 except ValueError:
-                    raise TypeError, 'new property "%s" not boolean'%propname
+                    raise TypeError('new property "%s" not '
+                        'boolean'%propname)
 
             node[propname] = value
 
@@ -1268,7 +1360,7 @@
         to modify the "creation" or "activity" properties cause a KeyError.
         """
         if self.db.journaltag is None:
-            raise hyperdb.DatabaseError, _('Database open read-only')
+            raise hyperdb.DatabaseError(_('Database open read-only'))
 
         self.fireAuditors('retire', nodeid, None)
 
@@ -1286,7 +1378,7 @@
         Make node available for all operations like it was before retirement.
         """
         if self.db.journaltag is None:
-            raise hyperdb.DatabaseError, _('Database open read-only')
+            raise hyperdb.DatabaseError(_('Database open read-only'))
 
         node = self.db.getnode(self.classname, nodeid)
         # check if key property was overrided
@@ -1296,8 +1388,8 @@
         except KeyError:
             pass
         else:
-            raise KeyError, "Key property (%s) of retired node clashes with \
-                existing one (%s)" % (key, node[key])
+            raise KeyError("Key property (%s) of retired node clashes "
+                "with existing one (%s)" % (key, node[key]))
         # Now we can safely restore node
         self.fireAuditors('restore', nodeid, None)
         del node[self.db.RETIRED_FLAG]
@@ -1311,7 +1403,7 @@
         """Return true if the node is retired.
         """
         node = self.db.getnode(self.classname, nodeid, cldb)
-        if node.has_key(self.db.RETIRED_FLAG):
+        if self.db.RETIRED_FLAG in node:
             return 1
         return 0
 
@@ -1332,26 +1424,9 @@
         support the session storage of the cgi interface.
         """
         if self.db.journaltag is None:
-            raise hyperdb.DatabaseError, _('Database open read-only')
+            raise hyperdb.DatabaseError(_('Database open read-only'))
         self.db.destroynode(self.classname, nodeid)
 
-    def history(self, nodeid):
-        """Retrieve the journal of edits on a particular node.
-
-        'nodeid' must be the id of an existing node of this class or an
-        IndexError is raised.
-
-        The returned list contains tuples of the form
-
-            (nodeid, date, tag, action, params)
-
-        'date' is a Timestamp object specifying the time of the change and
-        'tag' is the journaltag specified when the database was opened.
-        """
-        if not self.do_journal:
-            raise ValueError, 'Journalling is disabled for this class'
-        return self.db.getjournal(self.classname, nodeid)
-
     # Locating nodes:
     def hasnode(self, nodeid):
         """Determine if the given nodeid actually exists
@@ -1368,7 +1443,7 @@
         """
         prop = self.getprops()[propname]
         if not isinstance(prop, hyperdb.String):
-            raise TypeError, 'key properties must be String'
+            raise TypeError('key properties must be String')
         self.key = propname
 
     def getkey(self):
@@ -1385,21 +1460,22 @@
         otherwise a KeyError is raised.
         """
         if not self.key:
-            raise TypeError, 'No key property set for class %s'%self.classname
+            raise TypeError('No key property set for '
+                'class %s'%self.classname)
         cldb = self.db.getclassdb(self.classname)
         try:
             for nodeid in self.getnodeids(cldb):
                 node = self.db.getnode(self.classname, nodeid, cldb)
-                if node.has_key(self.db.RETIRED_FLAG):
+                if self.db.RETIRED_FLAG in node:
                     continue
-                if not node.has_key(self.key):
+                if self.key not in node:
                     continue
                 if node[self.key] == keyvalue:
                     return nodeid
         finally:
             cldb.close()
-        raise KeyError, 'No key (%s) value "%s" for "%s"'%(self.key,
-            keyvalue, self.classname)
+        raise KeyError('No key (%s) value "%s" for "%s"'%(self.key,
+            keyvalue, self.classname))
 
     # change from spec - allows multiple props to match
     def find(self, **propspec):
@@ -1417,12 +1493,12 @@
             db.issue.find(messages='1')
             db.issue.find(messages={'1':1,'3':1}, files={'7':1})
         """
-        propspec = propspec.items()
-        for propname, itemids in propspec:
+        for propname, itemids in propspec.iteritems():
             # check the prop is OK
             prop = self.properties[propname]
             if not isinstance(prop, hyperdb.Link) and not isinstance(prop, hyperdb.Multilink):
-                raise TypeError, "'%s' not a Link/Multilink property"%propname
+                raise TypeError("'%s' not a Link/Multilink "
+                    "property"%propname)
 
         # ok, now do the find
         cldb = self.db.getclassdb(self.classname)
@@ -1430,15 +1506,15 @@
         try:
             for id in self.getnodeids(db=cldb):
                 item = self.db.getnode(self.classname, id, db=cldb)
-                if item.has_key(self.db.RETIRED_FLAG):
+                if self.db.RETIRED_FLAG in item:
                     continue
-                for propname, itemids in propspec:
+                for propname, itemids in propspec.iteritems():
                     if type(itemids) is not type({}):
                         itemids = {itemids:1}
 
                     # special case if the item doesn't have this property
-                    if not item.has_key(propname):
-                        if itemids.has_key(None):
+                    if propname not in item:
+                        if None in itemids:
                             l.append(id)
                             break
                         continue
@@ -1446,13 +1522,13 @@
                     # grab the property definition and its value on this item
                     prop = self.properties[propname]
                     value = item[propname]
-                    if isinstance(prop, hyperdb.Link) and itemids.has_key(value):
+                    if isinstance(prop, hyperdb.Link) and value in itemids:
                         l.append(id)
                         break
                     elif isinstance(prop, hyperdb.Multilink):
                         hit = 0
                         for v in value:
-                            if itemids.has_key(v):
+                            if v in itemids:
                                 l.append(id)
                                 hit = 1
                                 break
@@ -1470,20 +1546,20 @@
 
         The return is a list of the id of all nodes that match.
         """
-        for propname in requirements.keys():
+        for propname in requirements:
             prop = self.properties[propname]
             if not isinstance(prop, hyperdb.String):
-                raise TypeError, "'%s' not a String property"%propname
+                raise TypeError("'%s' not a String property"%propname)
             requirements[propname] = requirements[propname].lower()
         l = []
         cldb = self.db.getclassdb(self.classname)
         try:
             for nodeid in self.getnodeids(cldb):
                 node = self.db.getnode(self.classname, nodeid, cldb)
-                if node.has_key(self.db.RETIRED_FLAG):
+                if self.db.RETIRED_FLAG in node:
                     continue
-                for key, value in requirements.items():
-                    if not node.has_key(key):
+                for key, value in requirements.iteritems():
+                    if key not in node:
                         break
                     if node[key] is None or node[key].lower() != value:
                         break
@@ -1502,7 +1578,7 @@
         try:
             for nodeid in self.getnodeids(cldb):
                 node = self.db.getnode(cn, nodeid, cldb)
-                if node.has_key(self.db.RETIRED_FLAG):
+                if self.db.RETIRED_FLAG in node:
                     continue
                 l.append(nodeid)
         finally:
@@ -1519,20 +1595,20 @@
         res = []
 
         # start off with the new nodes
-        if self.db.newnodes.has_key(self.classname):
-            res += self.db.newnodes[self.classname].keys()
+        if self.classname in self.db.newnodes:
+            res.extend(self.db.newnodes[self.classname])
 
         must_close = False
         if db is None:
             db = self.db.getclassdb(self.classname)
             must_close = True
         try:
-            res = res + db.keys()
+            res.extend(db.keys())
 
             # remove the uncommitted, destroyed nodes
-            if self.db.destroyednodes.has_key(self.classname):
-                for nodeid in self.db.destroyednodes[self.classname].keys():
-                    if db.has_key(nodeid):
+            if self.classname in self.db.destroyednodes:
+                for nodeid in self.db.destroyednodes[self.classname]:
+                    if key_in(db, nodeid):
                         res.remove(nodeid)
 
             # check retired flag
@@ -1540,7 +1616,7 @@
                 l = []
                 for nodeid in res:
                     node = self.db.getnode(self.classname, nodeid, db)
-                    is_ret = node.has_key(self.db.RETIRED_FLAG)
+                    is_ret = self.db.RETIRED_FLAG in node
                     if retired == is_ret:
                         l.append(nodeid)
                 res = l
@@ -1583,7 +1659,7 @@
         INTERVAL = 'spec:interval'
         OTHER = 'spec:other'
 
-        for k, v in filterspec.items():
+        for k, v in filterspec.iteritems():
             propclass = props[k]
             if isinstance(propclass, hyperdb.Link):
                 if type(v) is not type([]):
@@ -1627,12 +1703,14 @@
                     pass
 
             elif isinstance(propclass, hyperdb.Boolean):
-                if type(v) != type([]):
+                if type(v) == type(""):
                     v = v.split(',')
+                if type(v) != type([]):
+                    v = [v]
                 bv = []
                 for val in v:
                     if type(val) is type(''):
-                        bv.append(val.lower() in ('yes', 'true', 'on', '1'))
+                        bv.append(propclass.from_raw (val))
                     else:
                         bv.append(val)
                 l.append((OTHER, k, bv))
@@ -1644,11 +1722,14 @@
 
             elif isinstance(propclass, hyperdb.Number):
                 if type(v) != type([]):
-                    v = v.split(',')
+                    try :
+                        v = v.split(',')
+                    except AttributeError :
+                        v = [v]
                 l.append((OTHER, k, [float(val) for val in v]))
 
         filterspec = l
-        
+
         # now, find all the nodes that are active and pass filtering
         matches = []
         cldb = self.db.getclassdb(cn)
@@ -1657,7 +1738,7 @@
             # TODO: only full-scan once (use items())
             for nodeid in self.getnodeids(cldb):
                 node = self.db.getnode(cn, nodeid, cldb)
-                if node.has_key(self.db.RETIRED_FLAG):
+                if self.db.RETIRED_FLAG in node:
                     continue
                 # apply filter
                 for t, k, v in filterspec:
@@ -1687,12 +1768,10 @@
                         if not v:
                             match = not nv
                         else:
-                            # othewise, make sure this node has each of the
+                            # otherwise, make sure this node has each of the
                             # required values
-                            for want in v:
-                                if want in nv:
-                                    match = 1
-                                    break
+                            expr = Expression(v)
+                            if expr.evaluate(nv): match = 1
                     elif t == STRING:
                         if nv is None:
                             nv = ''
@@ -1753,7 +1832,7 @@
                         try:
                             v = item[prop]
                         except KeyError:
-                            if JPROPS.has_key(prop):
+                            if prop in JPROPS:
                                 # force lookup of the special journal prop
                                 v = self.get(itemid, prop)
                             else:
@@ -1782,7 +1861,7 @@
                             key = link.orderprop()
                             child = pt.propdict[key]
                             if key!='id':
-                                if not lcache.has_key(v):
+                                if v not in lcache:
                                     # open the link class db if it's not already
                                     if lcldb is None:
                                         lcldb = self.db.getclassdb(lcn)
@@ -1847,15 +1926,15 @@
         may collide with the names of existing properties, or a ValueError
         is raised before any properties have been added.
         """
-        for key in properties.keys():
-            if self.properties.has_key(key):
-                raise ValueError, key
+        for key in properties:
+            if key in self.properties:
+                raise ValueError(key)
         self.properties.update(properties)
 
     def index(self, nodeid):
         """ Add (or refresh) the node to search indexes """
         # find all the String properties that have indexme
-        for prop, propclass in self.getprops().items():
+        for prop, propclass in self.getprops().iteritems():
             if isinstance(propclass, hyperdb.String) and propclass.indexme:
                 # index them under (classname, nodeid, property)
                 try:
@@ -1902,7 +1981,7 @@
             Return the nodeid of the node imported.
         """
         if self.db.journaltag is None:
-            raise hyperdb.DatabaseError, _('Database open read-only')
+            raise hyperdb.DatabaseError(_('Database open read-only'))
         properties = self.getprops()
 
         # make the new node's property map
@@ -1934,9 +2013,7 @@
             elif isinstance(prop, hyperdb.Interval):
                 value = date.Interval(value)
             elif isinstance(prop, hyperdb.Password):
-                pwd = password.Password()
-                pwd.unpack(value)
-                value = pwd
+                value = password.Password(encrypted=value)
             d[propname] = value
 
         # get a new id if necessary
@@ -1962,8 +2039,8 @@
                 date = date.get_tuple()
                 if action == 'set':
                     export_data = {}
-                    for propname, value in params.items():
-                        if not properties.has_key(propname):
+                    for propname, value in params.iteritems():
+                        if propname not in properties:
                             # property no longer in the schema
                             continue
 
@@ -1983,42 +2060,10 @@
                             value = str(value)
                         export_data[propname] = value
                     params = export_data
-                l = [nodeid, date, user, action, params]
-                r.append(map(repr, l))
+                r.append([repr(nodeid), repr(date), repr(user),
+                    repr(action), repr(params)])
         return r
 
-    def import_journals(self, entries):
-        """Import a class's journal.
-
-        Uses setjournal() to set the journal for each item."""
-        properties = self.getprops()
-        d = {}
-        for l in entries:
-            l = map(eval, l)
-            nodeid, jdate, user, action, params = l
-            r = d.setdefault(nodeid, [])
-            if action == 'set':
-                for propname, value in params.items():
-                    prop = properties[propname]
-                    if value is None:
-                        pass
-                    elif isinstance(prop, hyperdb.Date):
-                        if type(value) == type(()):
-                            print _('WARNING: invalid date tuple %r')%(value,)
-                            value = date.Date( "2000-1-1" )
-                        value = date.Date(value)
-                    elif isinstance(prop, hyperdb.Interval):
-                        value = date.Interval(value)
-                    elif isinstance(prop, hyperdb.Password):
-                        pwd = password.Password()
-                        pwd.unpack(value)
-                        value = pwd
-                    params[propname] = value
-            r.append((nodeid, date.Date(jdate), user, action, params))
-
-        for nodeid, l in d.items():
-            self.db.setjournal(self.classname, nodeid, l)
-
 class FileClass(hyperdb.FileClass, Class):
     """This class defines a large chunk of data. To support this, it has a
        mandatory String property "content" which is typically saved off
@@ -2032,9 +2077,9 @@
         """The newly-created class automatically includes the "content"
         and "type" properties.
         """
-        if not properties.has_key('content'):
+        if 'content' not in properties:
             properties['content'] = hyperdb.String(indexme='yes')
-        if not properties.has_key('type'):
+        if 'type' not in properties:
             properties['type'] = hyperdb.String()
         Class.__init__(self, db, classname, **properties)
 
@@ -2072,7 +2117,7 @@
         if propname == 'content':
             try:
                 return self.db.getfile(self.classname, nodeid, None)
-            except IOError, (strerror):
+            except IOError, strerror:
                 # XXX by catching this we don't see an error in the log.
                 return 'ERROR reading file: %s%s\n%s\n%s'%(
                         self.classname, nodeid, poss_msg, strerror)
@@ -2088,8 +2133,8 @@
 
         # create the oldvalues dict - fill in any missing values
         oldvalues = copy.deepcopy(self.db.getnode(self.classname, itemid))
-        for name,prop in self.getprops(protected=0).items():
-            if oldvalues.has_key(name):
+        for name, prop in self.getprops(protected=0).iteritems():
+            if name in oldvalues:
                 continue
             if isinstance(prop, hyperdb.Multilink):
                 oldvalues[name] = []
@@ -2098,7 +2143,7 @@
 
         # now remove the content property so it's not stored in the db
         content = None
-        if propvalues.has_key('content'):
+        if 'content' in propvalues:
             content = propvalues['content']
             del propvalues['content']
 
@@ -2125,7 +2170,7 @@
         Use the content-type property for the content property.
         """
         # find all the String properties that have indexme
-        for prop, propclass in self.getprops().items():
+        for prop, propclass in self.getprops().iteritems():
             if prop == 'content' and propclass.indexme:
                 mime_type = self.get(nodeid, 'type', self.default_mime_type)
                 self.db.indexer.add_text((self.classname, nodeid, 'content'),
@@ -2148,17 +2193,17 @@
         dictionary attempts to specify any of these properties or a
         "creation" or "activity" property, a ValueError is raised.
         """
-        if not properties.has_key('title'):
+        if 'title' not in properties:
             properties['title'] = hyperdb.String(indexme='yes')
-        if not properties.has_key('messages'):
+        if 'messages' not in properties:
             properties['messages'] = hyperdb.Multilink("msg")
-        if not properties.has_key('files'):
+        if 'files' not in properties:
             properties['files'] = hyperdb.Multilink("file")
-        if not properties.has_key('nosy'):
+        if 'nosy' not in properties:
             # note: journalling is turned off as it really just wastes
             # space. this behaviour may be overridden in an instance
             properties['nosy'] = hyperdb.Multilink("user", do_journal="no")
-        if not properties.has_key('superseder'):
+        if 'superseder' not in properties:
             properties['superseder'] = hyperdb.Multilink(classname)
         Class.__init__(self, db, classname, **properties)
 

Modified: tracker/roundup-src/roundup/backends/back_mysql.py
==============================================================================
--- tracker/roundup-src/roundup/backends/back_mysql.py	(original)
+++ tracker/roundup-src/roundup/backends/back_mysql.py	Thu Aug  4 15:46:52 2011
@@ -564,6 +564,11 @@
                 raise
 
 class MysqlClass:
+
+    def supports_subselects(self):
+        # TODO: AFAIK its version dependent for MySQL
+        return False
+
     def _subselect(self, classname, multilink_table):
         ''' "I can't believe it's not a toy RDBMS"
            see, even toy RDBMSes like gadfly and sqlite can do sub-selects...
@@ -572,6 +577,70 @@
         s = ','.join([x[0] for x in self.db.sql_fetchall()])
         return '_%s.id not in (%s)'%(classname, s)
 
+    def create_inner(self, **propvalues):
+        try:
+            return rdbms_common.Class.create_inner(self, **propvalues)
+        except MySQLdb.IntegrityError, e:
+            self._handle_integrity_error(e, propvalues)
+
+    def set_inner(self, nodeid, **propvalues):
+        try:
+            return rdbms_common.Class.set_inner(self, nodeid,
+                                                **propvalues)
+        except MySQLdb.IntegrityError, e:
+            self._handle_integrity_error(e, propvalues)
+
+    def _handle_integrity_error(self, e, propvalues):
+        ''' Handle a MySQL IntegrityError.
+
+        If the error is recognized, then it may be converted into an
+        alternative exception.  Otherwise, it is raised unchanged from
+        this function.'''
+
+        # There are checks in create_inner/set_inner to see if a node
+        # is being created with the same key as an existing node.
+        # But, there is a race condition -- we may pass those checks,
+        # only to find out that a parallel session has created the
+        # node by by the time we actually issue the SQL command to
+        # create the node.  Fortunately, MySQL gives us a unique error
+        # code for this situation, so we can detect it here and handle
+        # it appropriately.
+        # 
+        # The details of the race condition are as follows, where
+        # "X" is a classname, and the term "thread" is meant to
+        # refer generically to both threads and processes:
+        #
+        # Thread A                    Thread B
+        # --------                    --------
+        #                             read table for X
+        # create new X object
+        # commit
+        #                             create new X object
+        #
+        # In Thread B, the check in create_inner does not notice that
+        # the new X object is a duplicate of that committed in Thread
+        # A because MySQL's default "consistent nonlocking read"
+        # behavior means that Thread B sees a snapshot of the database
+        # at the point at which its transaction began -- which was
+        # before Thread A created the object.  However, the attempt
+        # to *write* to the table for X, creating a duplicate entry,
+        # triggers an error at the point of the write.
+        #
+        # If both A and B's transaction begins with creating a new X
+        # object, then this bug cannot occur because creating the
+        # object requires getting a new ID, and newid() locks the id
+        # table until the transaction is committed or rolled back.  So,
+        # B will block until A's commit is complete, and will not
+        # actually get its snapshot until A's transaction completes.
+        # But, if the transaction has begun prior to calling newid,
+        # then the snapshot has already been established.
+        if e[0] == ER.DUP_ENTRY:
+            key = propvalues[self.key]
+            raise ValueError, 'node with key "%s" exists' % key
+        # We don't know what this exception is; reraise it.
+        raise
+        
+
 class Class(MysqlClass, rdbms_common.Class):
     pass
 class IssueClass(MysqlClass, rdbms_common.IssueClass):

Modified: tracker/roundup-src/roundup/backends/back_postgresql.py
==============================================================================
--- tracker/roundup-src/roundup/backends/back_postgresql.py	(original)
+++ tracker/roundup-src/roundup/backends/back_postgresql.py	Thu Aug  4 15:46:52 2011
@@ -27,38 +27,46 @@
 def connection_dict(config, dbnamestr=None):
     ''' read_default_group is MySQL-specific, ignore it '''
     d = rdbms_common.connection_dict(config, dbnamestr)
-    if d.has_key('read_default_group'):
+    if 'read_default_group' in d:
         del d['read_default_group']
-    if d.has_key('read_default_file'):
+    if 'read_default_file' in d:
         del d['read_default_file']
     return d
 
 def db_create(config):
     """Clear all database contents and drop database itself"""
-    command = "CREATE DATABASE %s WITH ENCODING='UNICODE'"%config.RDBMS_NAME
-    logging.getLogger('hyperdb').info(command)
+    command = "CREATE DATABASE \"%s\" WITH ENCODING='UNICODE'"%config.RDBMS_NAME
+    if config.RDBMS_TEMPLATE :
+        command = command + " TEMPLATE=%s" % config.RDBMS_TEMPLATE
+    logging.getLogger('roundup.hyperdb').info(command)
     db_command(config, command)
 
 def db_nuke(config, fail_ok=0):
     """Clear all database contents and drop database itself"""
-    command = 'DROP DATABASE %s'% config.RDBMS_NAME
-    logging.getLogger('hyperdb').info(command)
+    command = 'DROP DATABASE "%s"'% config.RDBMS_NAME
+    logging.getLogger('roundup.hyperdb').info(command)
     db_command(config, command)
 
     if os.path.exists(config.DATABASE):
         shutil.rmtree(config.DATABASE)
 
-def db_command(config, command):
+def db_command(config, command, database='postgres'):
     '''Perform some sort of database-level command. Retry 10 times if we
     fail by conflicting with another user.
+
+    Since PostgreSQL version 8.1 there is a database "postgres",
+    before that "template1" seems to have been used, so we fall back to it.
+    Compare to issue2550543.
     '''
     template1 = connection_dict(config)
-    template1['database'] = 'template1'
+    template1['database'] = database
 
     try:
         conn = psycopg.connect(**template1)
     except psycopg.OperationalError, message:
-        raise hyperdb.DatabaseError, message
+        if str(message).find('database "postgres" does not exist') >= 0:
+            return db_command(config, command, database='template1')
+        raise hyperdb.DatabaseError(message)
 
     conn.set_isolation_level(0)
     cursor = conn.cursor()
@@ -68,7 +76,7 @@
                 return
     finally:
         conn.close()
-    raise RuntimeError, '10 attempts to create database failed'
+    raise RuntimeError('10 attempts to create database failed')
 
 def pg_command(cursor, command):
     '''Execute the postgresql command, which may be blocked by some other
@@ -81,7 +89,7 @@
     except psycopg.ProgrammingError, err:
         response = str(err).split('\n')[0]
         if response.find('FATAL') != -1:
-            raise RuntimeError, response
+            raise RuntimeError(response)
         else:
             msgs = [
                 'is being accessed by other users',
@@ -94,7 +102,7 @@
             if can_retry:
                 time.sleep(1)
                 return 0
-            raise RuntimeError, response
+            raise RuntimeError(response)
     return 1
 
 def db_exists(config):
@@ -131,11 +139,12 @@
 
     def sql_open_connection(self):
         db = connection_dict(self.config, 'database')
-        logging.getLogger('hyperdb').info('open database %r'%db['database'])
+        logging.getLogger('roundup.hyperdb').info(
+            'open database %r'%db['database'])
         try:
             conn = psycopg.connect(**db)
         except psycopg.OperationalError, message:
-            raise hyperdb.DatabaseError, message
+            raise hyperdb.DatabaseError(message)
 
         cursor = conn.cursor()
 
@@ -209,7 +218,7 @@
     def add_actor_column(self):
         # update existing tables to have the new actor column
         tables = self.database_schema['tables']
-        for name in tables.keys():
+        for name in tables:
             self.sql('ALTER TABLE _%s add __actor VARCHAR(255)'%name)
 
     def __repr__(self):
@@ -218,7 +227,7 @@
     def sql_commit(self, fail_ok=False):
         ''' Actually commit to the database.
         '''
-        logging.getLogger('hyperdb').info('commit')
+        logging.getLogger('roundup.hyperdb').info('commit')
 
         try:
             self.conn.commit()
@@ -226,7 +235,8 @@
             # we've been instructed that this commit is allowed to fail
             if fail_ok and str(message).endswith('could not serialize '
                     'access due to concurrent update'):
-                logging.getLogger('hyperdb').info('commit FAILED, but fail_ok')
+                logging.getLogger('roundup.hyperdb').info(
+                    'commit FAILED, but fail_ok')
             else:
                 raise
 
@@ -271,7 +281,7 @@
         rdbms_common.Database.clear(self)
 
         # reset the sequences
-        for cn in self.classes.keys():
+        for cn in self.classes:
             self.cursor.execute('DROP SEQUENCE _%s_ids'%cn)
             self.cursor.execute('CREATE SEQUENCE _%s_ids'%cn)
 

Modified: tracker/roundup-src/roundup/backends/back_sqlite.py
==============================================================================
--- tracker/roundup-src/roundup/backends/back_sqlite.py	(original)
+++ tracker/roundup-src/roundup/backends/back_sqlite.py	Thu Aug  4 15:46:52 2011
@@ -75,11 +75,11 @@
 
     def sqlite_busy_handler(self, data, table, count):
         """invoked whenever SQLite tries to access a database that is locked"""
+        now = time.time()
         if count == 1:
-            # use a 30 second timeout (extraordinarily generous)
-            # for handling locked database
-            self._busy_handler_endtime = time.time() + 30
-        elif time.time() > self._busy_handler_endtime:
+            # Timeout for handling locked database (default 30s)
+            self._busy_handler_endtime = now + self.config.RDBMS_SQLITE_TIMEOUT
+        elif now > self._busy_handler_endtime:
             # timeout expired - no more retries
             return 0
         # sleep adaptively as retry count grows,
@@ -99,14 +99,14 @@
             os.makedirs(self.config.DATABASE)
 
         db = os.path.join(self.config.DATABASE, 'db')
-        logging.getLogger('hyperdb').info('open database %r'%db)
-        # set a 30 second timeout (extraordinarily generous) for handling
-        # locked database
+        logging.getLogger('roundup.hyperdb').info('open database %r'%db)
+        # set timeout (30 second default is extraordinarily generous)
+        # for handling locked database
         if sqlite_version == 1:
             conn = sqlite.connect(db=db)
             conn.db.sqlite_busy_handler(self.sqlite_busy_handler)
         else:
-            conn = sqlite.connect(db, timeout=30)
+            conn = sqlite.connect(db, timeout=self.config.RDBMS_SQLITE_TIMEOUT)
             conn.row_factory = sqlite.Row
 
         # pysqlite2 / sqlite3 want us to store Unicode in the db but
@@ -160,7 +160,7 @@
         # update existing tables to have the new actor column
         tables = self.database_schema['tables']
         for classname, spec in self.classes.items():
-            if tables.has_key(classname):
+            if classname in tables:
                 dbspec = tables[classname]
                 self.update_class(spec, dbspec, force=1, adding_v2=1)
                 # we've updated - don't try again
@@ -179,7 +179,6 @@
             SQLite doesn't have ALTER TABLE, so we have to copy and
             regenerate the tables with the new schema.
         """
-        new_has = spec.properties.has_key
         new_spec = spec.schema()
         new_spec[1].sort()
         old_spec[1].sort()
@@ -187,20 +186,20 @@
             # no changes
             return 0
 
-        logging.getLogger('hyperdb').info('update_class %s'%spec.classname)
+        logging.getLogger('roundup.hyperdb').info(
+            'update_class %s'%spec.classname)
 
         # detect multilinks that have been removed, and drop their table
         old_has = {}
         for name, prop in old_spec[1]:
             old_has[name] = 1
-            if new_has(name) or not isinstance(prop, hyperdb.Multilink):
+            if name in spec.properties or not isinstance(prop, hyperdb.Multilink):
                 continue
             # it's a multilink, and it's been removed - drop the old
             # table. First drop indexes.
             self.drop_multilink_table_indexes(spec.classname, name)
             sql = 'drop table %s_%s'%(spec.classname, prop)
             self.sql(sql)
-        old_has = old_has.has_key
 
         # now figure how we populate the new table
         if adding_v2:
@@ -211,7 +210,7 @@
         for propname,x in new_spec[1]:
             prop = properties[propname]
             if isinstance(prop, hyperdb.Multilink):
-                if not old_has(propname):
+                if propname not in old_has:
                     # we need to create the new table
                     self.create_multilink_table(spec, propname)
                 elif force:
@@ -232,7 +231,7 @@
                         (%s, %s)"""%(tn, self.arg, self.arg)
                     for linkid, nodeid in rows:
                         self.sql(sql, (int(linkid), int(nodeid)))
-            elif old_has(propname):
+            elif propname in old_has:
                 # we copy this col over from the old table
                 fetch.append('_'+propname)
 
@@ -263,7 +262,7 @@
                 elif isinstance(prop, hyperdb.Interval):
                     inscols.append('_'+propname)
                     inscols.append('__'+propname+'_int__')
-                elif old_has(propname):
+                elif propname in old_has:
                     # we copy this col over from the old table
                     inscols.append('_'+propname)
 
@@ -283,7 +282,7 @@
                                 v = hyperdb.Interval(entry[name]).as_seconds()
                             except IndexError:
                                 v = None
-                        elif entry.has_key(name):
+                        elif name in entry:
                             v = hyperdb.Interval(entry[name]).as_seconds()
                         else:
                             v = None
@@ -292,7 +291,7 @@
                             v = entry[name]
                         except IndexError:
                             v = None
-                    elif (sqlite_version == 1 and entry.has_key(name)):
+                    elif (sqlite_version == 1 and name in entry):
                         v = entry[name]
                     else:
                         v = None
@@ -397,8 +396,8 @@
         """ If there's NO matches to a fetch, sqlite returns NULL
             instead of nothing
         """
-        return filter(None, rdbms_common.Class.filter(self, search_matches,
-            filterspec, sort=sort, group=group))
+        return [f for f in rdbms_common.Class.filter(self, search_matches,
+            filterspec, sort=sort, group=group) if f]
 
 class Class(sqliteClass, rdbms_common.Class):
     pass

Modified: tracker/roundup-src/roundup/backends/blobfiles.py
==============================================================================
--- tracker/roundup-src/roundup/backends/blobfiles.py	(original)
+++ tracker/roundup-src/roundup/backends/blobfiles.py	Thu Aug  4 15:46:52 2011
@@ -304,6 +304,10 @@
         # file just ain't there
         raise IOError('content file for %s not found'%filename)
 
+    def filesize(self, classname, nodeid, property=None, create=0):
+        filename = self.filename(classname, nodeid, property, create)
+        return os.path.getsize(filename)
+
     def storefile(self, classname, nodeid, property, content):
         """Store the content of the file in the database. The property may be
            None, in which case the filename does not indicate which property

Modified: tracker/roundup-src/roundup/backends/indexer_common.py
==============================================================================
--- tracker/roundup-src/roundup/backends/indexer_common.py	(original)
+++ tracker/roundup-src/roundup/backends/indexer_common.py	Thu Aug  4 15:46:52 2011
@@ -36,8 +36,6 @@
     def search(self, search_terms, klass, ignore={}):
         """Display search results looking for [search, terms] associated
         with the hyperdb Class "klass". Ignore hits on {class: property}.
-
-        "dre" is a helper, not an argument.
         """
         # do the index lookup
         hits = self.getHits(search_terms, klass)
@@ -45,7 +43,7 @@
             return {}
 
         designator_propname = {}
-        for nm, propclass in klass.getprops().items():
+        for nm, propclass in klass.getprops().iteritems():
             if _isLink(propclass):
                 designator_propname.setdefault(propclass.classname,
                     []).append(nm)
@@ -54,7 +52,7 @@
         # and files
         nodeids = {}      # this is the answer
         propspec = {}     # used to do the klass.find
-        for l in designator_propname.values():
+        for l in designator_propname.itervalues():
             for propname in l:
                 propspec[propname] = {}  # used as a set (value doesn't matter)
 
@@ -63,7 +61,7 @@
             # skip this result if we don't care about this class/property
             classname = entry[0]
             property = entry[2]
-            if ignore.has_key((classname, property)):
+            if (classname, property) in ignore:
                 continue
 
             # if it's a property on klass, it's easy
@@ -71,12 +69,12 @@
             # backends as that can cause problems down the track)
             nodeid = str(entry[1])
             if classname == klass.classname:
-                if not nodeids.has_key(nodeid):
+                if nodeid not in nodeids:
                     nodeids[nodeid] = {}
                 continue
 
             # make sure the class is a linked one, otherwise ignore
-            if not designator_propname.has_key(classname):
+            if classname not in designator_propname:
                 continue
 
             # it's a linked class - set up to do the klass.find
@@ -84,7 +82,7 @@
                 propspec[linkprop][nodeid] = 1
 
         # retain only the meaningful entries
-        for propname, idset in propspec.items():
+        for propname, idset in list(propspec.items()):
             if not idset:
                 del propspec[propname]
 
@@ -97,16 +95,16 @@
             nodeids[resid] = {}
             node_dict = nodeids[resid]
             # now figure out where it came from
-            for linkprop in propspec.keys():
+            for linkprop in propspec:
                 v = klass.get(resid, linkprop)
                 # the link might be a Link so deal with a single result or None
                 if isinstance(propdefs[linkprop], hyperdb.Link):
                     if v is None: continue
                     v = [v]
                 for nodeid in v:
-                    if propspec[linkprop].has_key(nodeid):
+                    if nodeid in propspec[linkprop]:
                         # OK, this node[propname] has a winner
-                        if not node_dict.has_key(linkprop):
+                        if linkprop not in node_dict:
                             node_dict[linkprop] = [nodeid]
                         else:
                             node_dict[linkprop].append(nodeid)

Modified: tracker/roundup-src/roundup/backends/indexer_dbm.py
==============================================================================
--- tracker/roundup-src/roundup/backends/indexer_dbm.py	(original)
+++ tracker/roundup-src/roundup/backends/indexer_dbm.py	Thu Aug  4 15:46:52 2011
@@ -81,7 +81,7 @@
         self.load_index()
 
         # remove old entries for this identifier
-        if self.files.has_key(identifier):
+        if identifier in self.files:
             self.purge_entry(identifier)
 
         # split into words
@@ -99,15 +99,15 @@
         for word in words:
             if self.is_stopword(word):
                 continue
-            if filedict.has_key(word):
+            if word in filedict:
                 filedict[word] = filedict[word]+1
             else:
                 filedict[word] = 1
 
         # now add to the totals
-        for word in filedict.keys():
+        for word in filedict:
             # each word has a dict of {identifier: count}
-            if self.words.has_key(word):
+            if word in self.words:
                 entry = self.words[word]
             else:
                 # new word
@@ -162,18 +162,18 @@
                 return {}
             if hits is None:
                 hits = {}
-                for k in entry.keys():
-                    if not self.fileids.has_key(k):
-                        raise ValueError, 'Index is corrupted: re-generate it'
+                for k in entry:
+                    if k not in self.fileids:
+                        raise ValueError('Index is corrupted: re-generate it')
                     hits[k] = self.fileids[k]
             else:
                 # Eliminate hits for every non-match
-                for fileid in hits.keys():
-                    if not entry.has_key(fileid):
+                for fileid in list(hits):
+                    if fileid not in entry:
                         del hits[fileid]
         if hits is None:
             return {}
-        return hits.values()
+        return list(hits.values())
 
     segments = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ#_-!"
     def load_index(self, reload=0, wordlist=None):
@@ -205,7 +205,7 @@
                 dbslice = marshal.loads(pickle_str)
                 if dbslice.get('WORDS'):
                     # if it has some words, add them
-                    for word, entry in dbslice['WORDS'].items():
+                    for word, entry in dbslice['WORDS'].iteritems():
                         db['WORDS'][word] = entry
                 if dbslice.get('FILES'):
                     # if it has some files, add them
@@ -241,7 +241,7 @@
         segdicts = {}                           # Need batch of empty dicts
         for segment in letters:
             segdicts[segment] = {}
-        for word, entry in self.words.items():  # Split into segment dicts
+        for word, entry in self.words.iteritems():  # Split into segment dicts
             initchar = word[0].upper()
             segdicts[initchar][word] = entry
 
@@ -262,7 +262,7 @@
         '''
         self.load_index()
 
-        if not self.files.has_key(identifier):
+        if identifier not in self.files:
             return
 
         file_index = self.files[identifier][0]
@@ -270,8 +270,8 @@
         del self.fileids[file_index]
 
         # The much harder part, cleanup the word index
-        for key, occurs in self.words.items():
-            if occurs.has_key(file_index):
+        for key, occurs in self.words.iteritems():
+            if file_index in occurs:
                 del occurs[file_index]
 
         # save needed

Modified: tracker/roundup-src/roundup/backends/indexer_rdbms.py
==============================================================================
--- tracker/roundup-src/roundup/backends/indexer_rdbms.py	(original)
+++ tracker/roundup-src/roundup/backends/indexer_rdbms.py	Thu Aug  4 15:46:52 2011
@@ -64,10 +64,12 @@
             self.db.cursor.execute(sql, (id, ))
 
         # ok, find all the unique words in the text
-        text = unicode(text, "utf-8", "replace").upper()
+        if not isinstance(text, unicode):
+            text = unicode(text, "utf-8", "replace")
+        text = text.upper()
         wordlist = [w.encode("utf-8")
-            for w in re.findall(r'(?u)\b\w{%d,%d}\b'
-                                % (self.minlength, self.maxlength), text)]
+                    for w in re.findall(r'(?u)\b\w{%d,%d}\b'
+                                        % (self.minlength, self.maxlength), text)]
         words = set()
         for word in wordlist:
             if self.is_stopword(word): continue
@@ -127,7 +129,7 @@
             sql = sql%(' '.join(join_list), self.db.arg, ' '.join(match_list))
             self.db.cursor.execute(sql, l)
 
-            r = map(lambda x: x[0], self.db.cursor.fetchall())
+            r = [x[0] for x in self.db.cursor.fetchall()]
             if not r:
                 return []
 

Modified: tracker/roundup-src/roundup/backends/indexer_xapian.py
==============================================================================
--- tracker/roundup-src/roundup/backends/indexer_xapian.py	(original)
+++ tracker/roundup-src/roundup/backends/indexer_xapian.py	Thu Aug  4 15:46:52 2011
@@ -24,7 +24,6 @@
         '''Save the changes to the index.'''
         if not self.transaction_active:
             return
-        # XXX: Xapian databases don't actually implement transactions yet
         database = self._get_database()
         database.commit_transaction()
         self.transaction_active = False
@@ -36,7 +35,6 @@
     def rollback(self):
         if not self.transaction_active:
             return
-        # XXX: Xapian databases don't actually implement transactions yet
         database = self._get_database()
         database.cancel_transaction()
         self.transaction_active = False
@@ -59,7 +57,9 @@
 
         # open the database and start a transaction if needed
         database = self._get_database()
-        # XXX: Xapian databases don't actually implement transactions yet
+
+        # XXX: Xapian now supports transactions,
+        #  but a call to save_index() is missing.
         #if not self.transaction_active:
             #database.begin_transaction()
             #self.transaction_active = True
@@ -72,21 +72,10 @@
         # indexed so we know what we're matching when we get results
         identifier = '%s:%s:%s'%identifier
 
-        # see if the id is in the database
-        enquire = xapian.Enquire(database)
-        query = xapian.Query(xapian.Query.OP_AND, [identifier])
-        enquire.set_query(query)
-        matches = enquire.get_mset(0, 10)
-        if matches.size():      # would it killya to implement __len__()??
-            b = matches.begin()
-            docid = b.get_docid()
-        else:
-            docid = None
-
         # create the new document
         doc = xapian.Document()
         doc.set_data(identifier)
-        doc.add_posting(identifier, 0)
+        doc.add_term(identifier, 0)
 
         for match in re.finditer(r'\b\w{%d,%d}\b'
                                  % (self.minlength, self.maxlength),
@@ -96,10 +85,8 @@
                 continue
             term = stemmer(word)
             doc.add_posting(term, match.start(0))
-        if docid:
-            database.replace_document(docid, doc)
-        else:
-            database.add_document(doc)
+
+        database.replace_document(identifier, doc)
 
     def find(self, wordlist):
         '''look up all the words in the wordlist.
@@ -123,6 +110,6 @@
         enquire.set_query(query)
         matches = enquire.get_mset(0, 10)
 
-        return [tuple(m[xapian.MSET_DOCUMENT].get_data().split(':'))
+        return [tuple(m.document.get_data().split(':'))
             for m in matches]
 

Modified: tracker/roundup-src/roundup/backends/locking.py
==============================================================================
--- tracker/roundup-src/roundup/backends/locking.py	(original)
+++ tracker/roundup-src/roundup/backends/locking.py	Thu Aug  4 15:46:52 2011
@@ -28,12 +28,11 @@
 '''
 __docformat__ = 'restructuredtext'
 
-import portalocker
+from roundup.backends import portalocker
 
 def acquire_lock(path, block=1):
     '''Acquire a lock for the given path
     '''
-    import portalocker
     file = open(path, 'w')
     if block:
         portalocker.lock(file, portalocker.LOCK_EX)

Modified: tracker/roundup-src/roundup/backends/portalocker.py
==============================================================================
--- tracker/roundup-src/roundup/backends/portalocker.py	(original)
+++ tracker/roundup-src/roundup/backends/portalocker.py	Thu Aug  4 15:46:52 2011
@@ -136,10 +136,9 @@
 if __name__ == '__main__':
     from time import time, strftime, localtime
     import sys
-    import portalocker
 
     log = open('log.txt', "a+")
-    portalocker.lock(log, portalocker.LOCK_EX)
+    lock(log, LOCK_EX)
 
     timestamp = strftime("%m/%d/%Y %H:%M:%S\n", localtime(time()))
     log.write( timestamp )

Modified: tracker/roundup-src/roundup/backends/rdbms_common.py
==============================================================================
--- tracker/roundup-src/roundup/backends/rdbms_common.py	(original)
+++ tracker/roundup-src/roundup/backends/rdbms_common.py	Thu Aug  4 15:46:52 2011
@@ -52,7 +52,7 @@
 __docformat__ = 'restructuredtext'
 
 # standard python modules
-import sys, os, time, re, errno, weakref, copy, logging
+import sys, os, time, re, errno, weakref, copy, logging, datetime
 
 # roundup modules
 from roundup import hyperdb, date, password, roundupdb, security, support
@@ -62,15 +62,19 @@
 from roundup.support import reversed
 from roundup.i18n import _
 
+
 # support
-from blobfiles import FileStorage
+from roundup.backends.blobfiles import FileStorage
 try:
-    from indexer_xapian import Indexer
+    from roundup.backends.indexer_xapian import Indexer
 except ImportError:
-    from indexer_rdbms import Indexer
-from sessions_rdbms import Sessions, OneTimeKeys
+    from roundup.backends.indexer_rdbms import Indexer
+from roundup.backends.sessions_rdbms import Sessions, OneTimeKeys
 from roundup.date import Range
 
+from roundup.backends.back_anydbm import compile_expression
+
+
 # dummy value meaning "argument not passed"
 _marker = []
 
@@ -87,6 +91,13 @@
     # assume it's a number returned from the db API
     return int(value)
 
+def date_to_hyperdb_value(d):
+    """ convert date d to a roundup date """
+    if isinstance (d, datetime.datetime):
+        return date.Date(d)
+    return date.Date (str(d).replace(' ', '.'))
+
+
 def connection_dict(config, dbnamestr=None):
     """ Used by Postgresql and MySQL to detemine the keyword args for
     opening the database connection."""
@@ -100,6 +111,54 @@
             d[name] = config[cvar]
     return d
 
+
+class IdListOptimizer:
+    """ To prevent flooding the SQL parser of the underlaying
+        db engine with "x IN (1, 2, 3, ..., <large number>)" collapses
+        these cases to "x BETWEEN 1 AND <large number>".
+    """
+
+    def __init__(self):
+        self.ranges  = []
+        self.singles = []
+
+    def append(self, nid):
+        """ Invariant: nids are appended in ascending order """
+        if self.ranges:
+            last = self.ranges[-1]
+            if last[1] == nid-1:
+                last[1] = nid
+                return
+        if self.singles:
+            last = self.singles[-1]
+            if last == nid-1:
+                self.singles.pop()
+                self.ranges.append([last, nid])
+                return
+        self.singles.append(nid)
+
+    def where(self, field, placeholder):
+        ranges  = self.ranges
+        singles = self.singles
+
+        if not singles and not ranges: return "(1=0)", []
+
+        if ranges:
+            between = '%s BETWEEN %s AND %s' % (
+                field, placeholder, placeholder)
+            stmnt = [between] * len(ranges)
+        else:
+            stmnt = []
+        if singles:
+            stmnt.append('%s in (%s)' % (
+                field, ','.join([placeholder]*len(singles))))
+
+        return '(%s)' % ' OR '.join(stmnt), sum(ranges, []) + singles
+
+    def __str__(self):
+        return "ranges: %r / singles: %r" % (self.ranges, self.singles)
+
+
 class Database(FileStorage, hyperdb.Database, roundupdb.Database):
     """ Wrapper around an SQL database that presents a hyperdb interface.
 
@@ -123,8 +182,7 @@
         # keep a cache of the N most recently retrieved rows of any kind
         # (classname, nodeid) = row
         self.cache_size = config.RDBMS_CACHE_SIZE
-        self.cache = {}
-        self.cache_lru = []
+        self.clearCache()
         self.stats = {'cache_hits': 0, 'cache_misses': 0, 'get_items': 0,
             'filtering': 0}
 
@@ -151,14 +209,16 @@
         """
         raise NotImplemented
 
-    def sql(self, sql, args=None):
+    def sql(self, sql, args=None, cursor=None):
         """ Execute the sql with the optional args.
         """
         self.log_debug('SQL %r %r'%(sql, args))
+        if not cursor:
+            cursor = self.cursor
         if args:
-            self.cursor.execute(sql, args)
+            cursor.execute(sql, args)
         else:
-            self.cursor.execute(sql)
+            cursor.execute(sql)
 
     def sql_fetchone(self):
         """ Fetch a single row. If there's nothing to fetch, return None.
@@ -170,6 +230,14 @@
         """
         return self.cursor.fetchall()
 
+    def sql_fetchiter(self):
+        """ Fetch all row as a generator
+        """
+        while True:
+            row = self.cursor.fetchone()
+            if not row: break
+            yield row
+
     def sql_stringquote(self, value):
         """ Quote the string so it's safe to put in the 'sql quotes'
         """
@@ -208,8 +276,8 @@
 
         # handle changes in the schema
         tables = self.database_schema['tables']
-        for classname, spec in self.classes.items():
-            if tables.has_key(classname):
+        for classname, spec in self.classes.iteritems():
+            if classname in tables:
                 dbspec = tables[classname]
                 if self.update_class(spec, dbspec):
                     tables[classname] = spec.schema()
@@ -219,8 +287,8 @@
                 tables[classname] = spec.schema()
                 save = 1
 
-        for classname, spec in tables.items():
-            if not self.classes.has_key(classname):
+        for classname, spec in list(tables.items()):
+            if classname not in self.classes:
                 self.drop_class(classname, tables[classname])
                 del tables[classname]
                 save = 1
@@ -298,7 +366,7 @@
     def fix_version_4_tables(self):
         # note this is an explicit call now
         c = self.cursor
-        for cn, klass in self.classes.items():
+        for cn, klass in self.classes.iteritems():
             c.execute('select id from _%s where __retired__<>0'%(cn,))
             for (id,) in c.fetchall():
                 c.execute('update _%s set __retired__=%s where id=%s'%(cn,
@@ -311,7 +379,7 @@
         """Get current journal table contents, drop the table and re-create"""
         c = self.cursor
         cols = ','.join('nodeid date tag action params'.split())
-        for klass in self.classes.values():
+        for klass in self.classes.itervalues():
             # slurp and drop
             sql = 'select %s from %s__journal order by date'%(cols,
                 klass.classname)
@@ -333,9 +401,9 @@
         """Get current Class tables that contain String properties, and
         convert the VARCHAR columns to TEXT"""
         c = self.cursor
-        for klass in self.classes.values():
+        for klass in self.classes.itervalues():
             # slurp and drop
-            cols, mls = self.determine_columns(klass.properties.items())
+            cols, mls = self.determine_columns(list(klass.properties.iteritems()))
             scols = ','.join([i[0] for i in cols])
             sql = 'select id,%s from _%s'%(scols, klass.classname)
             c.execute(sql)
@@ -365,7 +433,7 @@
         if classname:
             classes = [self.getclass(classname)]
         else:
-            classes = self.classes.values()
+            classes = list(self.classes.itervalues())
         for klass in classes:
             if show_progress:
                 for nodeid in support.Progress('Reindex %s'%klass.classname,
@@ -396,7 +464,7 @@
             if issubclass(propclass, k):
                 return v
 
-        raise ValueError, '%r is not a hyperdb property class' % propclass
+        raise ValueError('%r is not a hyperdb property class' % propclass)
     
     def determine_columns(self, properties):
         """ Figure the column names and multilink properties from the spec
@@ -418,7 +486,7 @@
                 continue
 
             if isinstance(prop, type('')):
-                raise ValueError, "string property spec!"
+                raise ValueError("string property spec!")
                 #and prop.find('Multilink') != -1:
                 #mls.append(col)
 
@@ -438,7 +506,6 @@
 
             If 'force' is true, update the database anyway.
         """
-        new_has = spec.properties.has_key
         new_spec = spec.schema()
         new_spec[1].sort()
         old_spec[1].sort()
@@ -446,7 +513,10 @@
             # no changes
             return 0
 
-        logger = logging.getLogger('hyperdb')
+        if not self.config.RDBMS_ALLOW_ALTER:
+            raise DatabaseError(_('ALTER operation disallowed: %r -> %r.'%(old_spec, new_spec)))
+
+        logger = logging.getLogger('roundup.hyperdb')
         logger.info('update_class %s'%spec.classname)
 
         logger.debug('old_spec %r'%(old_spec,))
@@ -464,7 +534,7 @@
         old_has = {}
         for name, prop in old_spec[1]:
             old_has[name] = 1
-            if new_has(name):
+            if name in spec.properties:
                 continue
 
             if prop.find('Multilink to') != -1:
@@ -483,17 +553,16 @@
                 sql = 'alter table _%s drop column _%s'%(spec.classname, name)
 
             self.sql(sql)
-        old_has = old_has.has_key
 
         # if we didn't remove the key prop just then, but the key prop has
         # changed, we still need to remove the old index
-        if keyprop_changes.has_key('remove'):
+        if 'remove' in keyprop_changes:
             self.drop_class_table_key_index(spec.classname,
                 keyprop_changes['remove'])
 
         # add new columns
         for propname, prop in new_spec[1]:
-            if old_has(propname):
+            if propname in old_has:
                 continue
             prop = spec.properties[propname]
             if isinstance(prop, Multilink):
@@ -518,7 +587,7 @@
 
         # if we didn't add the key prop just then, but the key prop has
         # changed, we still need to add the new index
-        if keyprop_changes.has_key('add'):
+        if 'add' in keyprop_changes:
             self.create_class_table_key_index(spec.classname,
                 keyprop_changes['add'])
 
@@ -528,7 +597,7 @@
         """Figure out the columns from the spec and also add internal columns
 
         """
-        cols, mls = self.determine_columns(spec.properties.items())
+        cols, mls = self.determine_columns(list(spec.properties.iteritems()))
 
         # add on our special columns
         cols.append(('id', 'INTEGER PRIMARY KEY'))
@@ -671,6 +740,10 @@
     def create_class(self, spec):
         """ Create a database table according to the given spec.
         """
+
+        if not self.config.RDBMS_ALLOW_CREATE:
+            raise DatabaseError(_('CREATE operation disallowed: "%s".'%spec.classname))
+
         cols, mls = self.create_class_table(spec)
         self.create_journal_table(spec)
 
@@ -683,6 +756,10 @@
 
             Drop the journal and multilink tables too.
         """
+
+        if not self.config.RDBMS_ALLOW_DROP:
+            raise DatabaseError(_('DROP operation disallowed: "%s".'%cn))
+
         properties = spec[1]
         # figure the multilinks
         mls = []
@@ -716,16 +793,16 @@
     def __getattr__(self, classname):
         """ A convenient way of calling self.getclass(classname).
         """
-        if self.classes.has_key(classname):
+        if classname in self.classes:
             return self.classes[classname]
-        raise AttributeError, classname
+        raise AttributeError(classname)
 
     def addclass(self, cl):
         """ Add a Class to the hyperdatabase.
         """
         cn = cl.classname
-        if self.classes.has_key(cn):
-            raise ValueError, cn
+        if cn in self.classes:
+            raise ValueError(cn)
         self.classes[cn] = cl
 
         # add default Edit and View permissions
@@ -735,13 +812,13 @@
             description="User is allowed to edit "+cn)
         self.security.addPermission(name="View", klass=cn,
             description="User is allowed to access "+cn)
+        self.security.addPermission(name="Retire", klass=cn,
+            description="User is allowed to retire "+cn)
 
     def getclasses(self):
         """ Return a list of the names of all existing classes.
         """
-        l = self.classes.keys()
-        l.sort()
-        return l
+        return sorted(self.classes)
 
     def getclass(self, classname):
         """Get the Class object representing a particular class.
@@ -751,7 +828,7 @@
         try:
             return self.classes[classname]
         except KeyError:
-            raise KeyError, 'There is no class called "%s"'%classname
+            raise KeyError('There is no class called "%s"'%classname)
 
     def clear(self):
         """Delete all database contents.
@@ -759,8 +836,8 @@
         Note: I don't commit here, which is different behaviour to the
               "nuke from orbit" behaviour in the dbs.
         """
-        logging.getLogger('hyperdb').info('clear')
-        for cn in self.classes.keys():
+        logging.getLogger('roundup.hyperdb').info('clear')
+        for cn in self.classes:
             sql = 'delete from _%s'%cn
             self.sql(sql)
 
@@ -790,7 +867,22 @@
             if issubclass(propklass, k):
                 return v
 
-        raise ValueError, '%r is not a hyperdb property class' % propklass
+        raise ValueError('%r is not a hyperdb property class' % propklass)
+
+    def _cache_del(self, key):
+        del self.cache[key]
+        self.cache_lru.remove(key)
+
+    def _cache_refresh(self, key):
+        self.cache_lru.remove(key)
+        self.cache_lru.insert(0, key)
+
+    def _cache_save(self, key, node):
+        self.cache[key] = node
+        # update the LRU
+        self.cache_lru.insert(0, key)
+        if len(self.cache_lru) > self.cache_size:
+            del self.cache[self.cache_lru.pop()]
 
     def addnode(self, classname, nodeid, node):
         """ Add the specified node to its class's db.
@@ -800,11 +892,11 @@
 
         # determine the column definitions and multilink tables
         cl = self.classes[classname]
-        cols, mls = self.determine_columns(cl.properties.items())
+        cols, mls = self.determine_columns(list(cl.properties.iteritems()))
 
         # we'll be supplied these props if we're doing an import
         values = node.copy()
-        if not values.has_key('creator'):
+        if 'creator' not in values:
             # add in the "calculated" properties (dupe so we don't affect
             # calling code's node assumptions)
             values['creation'] = values['activity'] = date.Date()
@@ -815,8 +907,8 @@
         del props['id']
 
         # default the non-multilink columns
-        for col, prop in props.items():
-            if not values.has_key(col):
+        for col, prop in props.iteritems():
+            if col not in values:
                 if isinstance(prop, Multilink):
                     values[col] = []
                 else:
@@ -824,9 +916,8 @@
 
         # clear this node out of the cache if it's in there
         key = (classname, nodeid)
-        if self.cache.has_key(key):
-            del self.cache[key]
-            self.cache_lru.remove(key)
+        if key in self.cache:
+            self._cache_del(key)
 
         # figure the values to insert
         vals = []
@@ -874,9 +965,8 @@
 
         # clear this node out of the cache if it's in there
         key = (classname, nodeid)
-        if self.cache.has_key(key):
-            del self.cache[key]
-            self.cache_lru.remove(key)
+        if key in self.cache:
+            self._cache_del(key)
 
         cl = self.classes[classname]
         props = cl.getprops()
@@ -884,7 +974,7 @@
         cols = []
         mls = []
         # add the multilinks separately
-        for col in values.keys():
+        for col in values:
             prop = props[col]
             if isinstance(prop, Multilink):
                 mls.append(col)
@@ -953,7 +1043,7 @@
                     self.sql(sql, (entry, nodeid))
 
         # we have multilink changes to apply
-        for col, (add, remove) in multilink_changes.items():
+        for col, (add, remove) in multilink_changes.iteritems():
             tn = '%s_%s'%(classname, col)
             if add:
                 sql = 'insert into %s (nodeid, linkid) values (%s,%s)'%(tn,
@@ -970,7 +1060,7 @@
 
     sql_to_hyperdb_value = {
         hyperdb.String : str,
-        hyperdb.Date   : lambda x:date.Date(str(x).replace(' ', '.')),
+        hyperdb.Date   : date_to_hyperdb_value,
 #        hyperdb.Link   : int,      # XXX numeric ids
         hyperdb.Link   : str,
         hyperdb.Interval  : date.Interval,
@@ -990,20 +1080,47 @@
             if issubclass(propklass, k):
                 return v
 
-        raise ValueError, '%r is not a hyperdb property class' % propklass
+        raise ValueError('%r is not a hyperdb property class' % propklass)
 
-    def getnode(self, classname, nodeid):
+    def _materialize_multilink(self, classname, nodeid, node, propname):
+        """ evaluation of a single Multilink (lazy eval may have skipped this)
+        """
+        if propname not in node:
+            sql = 'select linkid from %s_%s where nodeid=%s'%(classname,
+                propname, self.arg)
+            self.sql(sql, (nodeid,))
+            # extract the first column from the result
+            # XXX numeric ids
+            items = [int(x[0]) for x in self.cursor.fetchall()]
+            items.sort ()
+            node[propname] = [str(x) for x in items]
+
+    def _materialize_multilinks(self, classname, nodeid, node, props=None):
+        """ get all Multilinks of a node (lazy eval may have skipped this)
+        """
+        cl = self.classes[classname]
+        props = props or [pn for (pn, p) in cl.properties.iteritems()
+                          if isinstance(p, Multilink)]
+        for propname in props:
+            if propname not in node:
+                self._materialize_multilink(classname, nodeid, node, propname)
+
+    def getnode(self, classname, nodeid, fetch_multilinks=True):
         """ Get a node from the database.
+            As an optimisation we can optionally skip fetching
+            multilinks (lazy Multilinks).
+            Internal database operations, however, do need them.
         """
         # see if we have this node cached
         key = (classname, nodeid)
-        if self.cache.has_key(key):
+        if key in self.cache:
             # push us back to the top of the LRU
-            self.cache_lru.remove(key)
-            self.cache_lru.insert(0, key)
+            self._cache_refresh(key)
             if __debug__:
                 self.stats['cache_hits'] += 1
             # return the cached information
+            if fetch_multilinks:
+                self._materialize_multilinks(classname, nodeid, self.cache[key])
             return self.cache[key]
 
         if __debug__:
@@ -1012,7 +1129,7 @@
 
         # figure the columns we're fetching
         cl = self.classes[classname]
-        cols, mls = self.determine_columns(cl.properties.items())
+        cols, mls = self.determine_columns(list(cl.properties.iteritems()))
         scols = ','.join([col for col,dt in cols])
 
         # perform the basic property fetch
@@ -1021,7 +1138,7 @@
 
         values = self.sql_fetchone()
         if values is None:
-            raise IndexError, 'no such %s node %s'%(classname, nodeid)
+            raise IndexError('no such %s node %s'%(classname, nodeid))
 
         # make up the node
         node = {}
@@ -1037,26 +1154,12 @@
                 value = self.to_hyperdb_value(props[name].__class__)(value)
             node[name] = value
 
-
-        # now the multilinks
-        for col in mls:
-            # get the link ids
-            sql = 'select linkid from %s_%s where nodeid=%s'%(classname, col,
-                self.arg)
-            self.sql(sql, (nodeid,))
-            # extract the first column from the result
-            # XXX numeric ids
-            items = [int(x[0]) for x in self.cursor.fetchall()]
-            items.sort ()
-            node[col] = [str(x) for x in items]
+        if fetch_multilinks and mls:
+            self._materialize_multilinks(classname, nodeid, node, mls)
 
         # save off in the cache
         key = (classname, nodeid)
-        self.cache[key] = node
-        # update the LRU
-        self.cache_lru.insert(0, key)
-        if len(self.cache_lru) > self.cache_size:
-            del self.cache[self.cache_lru.pop()]
+        self._cache_save(key, node)
 
         if __debug__:
             self.stats['get_items'] += (time.time() - start_t)
@@ -1067,14 +1170,15 @@
         """Remove a node from the database. Called exclusively by the
            destroy() method on Class.
         """
-        logging.getLogger('hyperdb').info('destroynode %s%s'%(classname, nodeid))
+        logging.getLogger('roundup.hyperdb').info('destroynode %s%s'%(
+            classname, nodeid))
 
         # make sure the node exists
         if not self.hasnode(classname, nodeid):
-            raise IndexError, '%s has no node %s'%(classname, nodeid)
+            raise IndexError('%s has no node %s'%(classname, nodeid))
 
         # see if we have this node cached
-        if self.cache.has_key((classname, nodeid)):
+        if (classname, nodeid) in self.cache:
             del self.cache[(classname, nodeid)]
 
         # see if there's any obvious commit actions that we should get rid of
@@ -1088,7 +1192,7 @@
 
         # remove from multilnks
         cl = self.getclass(classname)
-        x, mls = self.determine_columns(cl.properties.items())
+        x, mls = self.determine_columns(list(cl.properties.iteritems()))
         for col in mls:
             # get the link ids
             sql = 'delete from %s_%s where nodeid=%s'%(classname, col, self.arg)
@@ -1109,7 +1213,7 @@
             return 0
         # If this node is in the cache, then we do not need to go to
         # the database.  (We don't consider this an LRU hit, though.)
-        if self.cache.has_key((classname, nodeid)):
+        if (classname, nodeid) in self.cache:
             # Return 1, not True, to match the type of the result of
             # the SQL operation below.
             return 1
@@ -1189,7 +1293,7 @@
         """Convert the journal params values into safely repr'able and
         eval'able values."""
         properties = self.getclass(classname).getprops()
-        for param, value in params.items():
+        for param, value in params.iteritems():
             if not value:
                 continue
             property = properties[param]
@@ -1208,7 +1312,7 @@
         """
         # make sure the node exists
         if not self.hasnode(classname, nodeid):
-            raise IndexError, '%s has no node %s'%(classname, nodeid)
+            raise IndexError('%s has no node %s'%(classname, nodeid))
 
         cols = ','.join('nodeid date tag action params'.split())
         journal = self.load_journal(classname, cols, nodeid)
@@ -1220,7 +1324,7 @@
         for nodeid, date_stamp, user, action, params in journal:
             params = eval(params)
             if isinstance(params, type({})):
-                for param, value in params.items():
+                for param, value in params.iteritems():
                     if not value:
                         continue
                     property = properties.get(param, None)
@@ -1229,7 +1333,7 @@
                         continue
                     cvt = self.to_hyperdb_value(property.__class__)
                     if isinstance(property, Password):
-                        params[param] = cvt(value)
+                        params[param] = password.JournalPassword(value)
                     elif isinstance(property, Date):
                         params[param] = cvt(value)
                     elif isinstance(property, Interval):
@@ -1267,7 +1371,7 @@
         date_stamp = self.to_sql_value(Date)(pack_before)
 
         # do the delete
-        for classname in self.classes.keys():
+        for classname in self.classes:
             sql = "delete from %s__journal where date<%s and "\
                 "action<>'create'"%(classname, self.arg)
             self.sql(sql, (date_stamp,))
@@ -1275,7 +1379,7 @@
     def sql_commit(self, fail_ok=False):
         """ Actually commit to the database.
         """
-        logging.getLogger('hyperdb').info('commit')
+        logging.getLogger('roundup.hyperdb').info('commit')
 
         self.conn.commit()
 
@@ -1307,6 +1411,11 @@
         # clear out the transactions
         self.transactions = []
 
+        # clear the cache: Don't carry over cached values from one
+        # transaction to the next (there may be other changes from other
+        # transactions)
+        self.clearCache()
+
     def sql_rollback(self):
         self.conn.rollback()
 
@@ -1316,7 +1425,7 @@
         Undo all the changes made since the database was opened or the last
         commit() or rollback() was performed.
         """
-        logging.getLogger('hyperdb').info('rollback')
+        logging.getLogger('roundup.hyperdb').info('rollback')
 
         self.sql_rollback()
 
@@ -1331,7 +1440,7 @@
         self.clearCache()
 
     def sql_close(self):
-        logging.getLogger('hyperdb').info('close')
+        logging.getLogger('roundup.hyperdb').info('close')
         self.conn.close()
 
     def close(self):
@@ -1354,7 +1463,7 @@
         """ A dumpable version of the schema that we can store in the
             database
         """
-        return (self.key, [(x, repr(y)) for x,y in self.properties.items()])
+        return (self.key, [(x, repr(y)) for x,y in self.properties.iteritems()])
 
     def enableJournalling(self):
         """Turn journalling on for this class
@@ -1392,51 +1501,52 @@
     def create_inner(self, **propvalues):
         """ Called by create, in-between the audit and react calls.
         """
-        if propvalues.has_key('id'):
-            raise KeyError, '"id" is reserved'
+        if 'id' in propvalues:
+            raise KeyError('"id" is reserved')
 
         if self.db.journaltag is None:
-            raise DatabaseError, _('Database open read-only')
+            raise DatabaseError(_('Database open read-only'))
 
-        if propvalues.has_key('creator') or propvalues.has_key('actor') or \
-             propvalues.has_key('creation') or propvalues.has_key('activity'):
-            raise KeyError, '"creator", "actor", "creation" and '\
-                '"activity" are reserved'
+        if ('creator' in propvalues or 'actor' in propvalues or 
+             'creation' in propvalues or 'activity' in propvalues):
+            raise KeyError('"creator", "actor", "creation" and '
+                '"activity" are reserved')
 
         # new node's id
         newid = self.db.newid(self.classname)
 
         # validate propvalues
         num_re = re.compile('^\d+$')
-        for key, value in propvalues.items():
+        for key, value in propvalues.iteritems():
             if key == self.key:
                 try:
                     self.lookup(value)
                 except KeyError:
                     pass
                 else:
-                    raise ValueError, 'node with key "%s" exists'%value
+                    raise ValueError('node with key "%s" exists'%value)
 
             # try to handle this property
             try:
                 prop = self.properties[key]
             except KeyError:
-                raise KeyError, '"%s" has no property "%s"'%(self.classname,
-                    key)
+                raise KeyError('"%s" has no property "%s"'%(self.classname,
+                    key))
 
             if value is not None and isinstance(prop, Link):
                 if type(value) != type(''):
-                    raise ValueError, 'link value must be String'
+                    raise ValueError('link value must be String')
                 link_class = self.properties[key].classname
                 # if it isn't a number, it's a key
                 if not num_re.match(value):
                     try:
                         value = self.db.classes[link_class].lookup(value)
                     except (TypeError, KeyError):
-                        raise IndexError, 'new property "%s": %s not a %s'%(
-                            key, value, link_class)
+                        raise IndexError('new property "%s": %s not a %s'%(
+                            key, value, link_class))
                 elif not self.db.getclass(link_class).hasnode(value):
-                    raise IndexError, '%s has no node %s'%(link_class, value)
+                    raise IndexError('%s has no node %s'%(link_class,
+                        value))
 
                 # save off the value
                 propvalues[key] = value
@@ -1450,22 +1560,21 @@
                 if value is None:
                     value = []
                 if not hasattr(value, '__iter__'):
-                    raise TypeError, 'new property "%s" not an iterable of ids'%key
-
+                    raise TypeError('new property "%s" not an iterable of ids'%key) 
                 # clean up and validate the list of links
                 link_class = self.properties[key].classname
                 l = []
                 for entry in value:
                     if type(entry) != type(''):
-                        raise ValueError, '"%s" multilink value (%r) '\
-                            'must contain Strings'%(key, value)
+                        raise ValueError('"%s" multilink value (%r) '
+                            'must contain Strings'%(key, value))
                     # if it isn't a number, it's a key
                     if not num_re.match(entry):
                         try:
                             entry = self.db.classes[link_class].lookup(entry)
                         except (TypeError, KeyError):
-                            raise IndexError, 'new property "%s": %s not a %s'%(
-                                key, entry, self.properties[key].classname)
+                            raise IndexError('new property "%s": %s not a %s'%(
+                                key, entry, self.properties[key].classname))
                     l.append(entry)
                 value = l
                 propvalues[key] = value
@@ -1473,8 +1582,8 @@
                 # handle additions
                 for nodeid in value:
                     if not self.db.getclass(link_class).hasnode(nodeid):
-                        raise IndexError, '%s has no node %s'%(link_class,
-                            nodeid)
+                        raise IndexError('%s has no node %s'%(link_class,
+                            nodeid))
                     # register the link with the newly linked node
                     if self.do_journal and self.properties[key].do_journal:
                         self.db.addjournal(link_class, nodeid, 'link',
@@ -1482,41 +1591,41 @@
 
             elif isinstance(prop, String):
                 if type(value) != type('') and type(value) != type(u''):
-                    raise TypeError, 'new property "%s" not a string'%key
+                    raise TypeError('new property "%s" not a string'%key)
                 if prop.indexme:
                     self.db.indexer.add_text((self.classname, newid, key),
                         value)
 
             elif isinstance(prop, Password):
                 if not isinstance(value, password.Password):
-                    raise TypeError, 'new property "%s" not a Password'%key
+                    raise TypeError('new property "%s" not a Password'%key)
 
             elif isinstance(prop, Date):
                 if value is not None and not isinstance(value, date.Date):
-                    raise TypeError, 'new property "%s" not a Date'%key
+                    raise TypeError('new property "%s" not a Date'%key)
 
             elif isinstance(prop, Interval):
                 if value is not None and not isinstance(value, date.Interval):
-                    raise TypeError, 'new property "%s" not an Interval'%key
+                    raise TypeError('new property "%s" not an Interval'%key)
 
             elif value is not None and isinstance(prop, Number):
                 try:
                     float(value)
                 except ValueError:
-                    raise TypeError, 'new property "%s" not numeric'%key
+                    raise TypeError('new property "%s" not numeric'%key)
 
             elif value is not None and isinstance(prop, Boolean):
                 try:
                     int(value)
                 except ValueError:
-                    raise TypeError, 'new property "%s" not boolean'%key
+                    raise TypeError('new property "%s" not boolean'%key)
 
         # make sure there's data where there needs to be
-        for key, prop in self.properties.items():
-            if propvalues.has_key(key):
+        for key, prop in self.properties.iteritems():
+            if key in propvalues:
                 continue
             if key == self.key:
-                raise ValueError, 'key property "%s" is required'%key
+                raise ValueError('key property "%s" is required'%key)
             if isinstance(prop, Multilink):
                 propvalues[key] = []
             else:
@@ -1543,35 +1652,34 @@
             return nodeid
 
         # get the node's dict
-        d = self.db.getnode(self.classname, nodeid)
+        d = self.db.getnode(self.classname, nodeid, fetch_multilinks=False)
+        # handle common case -- that property is in dict -- first
+        # if None and one of creator/creation/actor/activity return None
+        if propname in d:
+            r = d [propname]
+            # return copy of our list
+            if isinstance (r, list):
+                return r[:]
+            if r is not None:
+                return r
+            elif propname in ('creation', 'activity', 'creator', 'actor'):
+                return r
+
+        # propname not in d:
+        if propname == 'creation' or propname == 'activity':
+            return date.Date()
+        if propname == 'creator' or propname == 'actor':
+            return self.db.getuid()
 
-        if propname == 'creation':
-            if d.has_key('creation'):
-                return d['creation']
-            else:
-                return date.Date()
-        if propname == 'activity':
-            if d.has_key('activity'):
-                return d['activity']
-            else:
-                return date.Date()
-        if propname == 'creator':
-            if d.has_key('creator'):
-                return d['creator']
-            else:
-                return self.db.getuid()
-        if propname == 'actor':
-            if d.has_key('actor'):
-                return d['actor']
-            else:
-                return self.db.getuid()
-
-        # get the property (raises KeyErorr if invalid)
+        # get the property (raises KeyError if invalid)
         prop = self.properties[propname]
 
-        # XXX may it be that propname is valid property name
-        #    (above error is not raised) and not d.has_key(propname)???
-        if (not d.has_key(propname)) or (d[propname] is None):
+        # lazy evaluation of Multilink
+        if propname not in d and isinstance(prop, Multilink):
+            self.db._materialize_multilink(self.classname, nodeid, d, propname)
+
+        # handle there being no value in the table for the property
+        if propname not in d or d[propname] is None:
             if default is _marker:
                 if isinstance(prop, Multilink):
                     return []
@@ -1616,20 +1724,20 @@
         if not propvalues:
             return propvalues
 
-        if propvalues.has_key('creation') or propvalues.has_key('creator') or \
-                propvalues.has_key('actor') or propvalues.has_key('activity'):
-            raise KeyError, '"creation", "creator", "actor" and '\
-                '"activity" are reserved'
+        if ('creator' in propvalues or 'actor' in propvalues or 
+             'creation' in propvalues or 'activity' in propvalues):
+            raise KeyError('"creator", "actor", "creation" and '
+                '"activity" are reserved')
 
-        if propvalues.has_key('id'):
-            raise KeyError, '"id" is reserved'
+        if 'id' in propvalues:
+            raise KeyError('"id" is reserved')
 
         if self.db.journaltag is None:
-            raise DatabaseError, _('Database open read-only')
+            raise DatabaseError(_('Database open read-only'))
 
         node = self.db.getnode(self.classname, nodeid)
         if self.is_retired(nodeid):
-            raise IndexError, 'Requested item is retired'
+            raise IndexError('Requested item is retired')
         num_re = re.compile('^\d+$')
 
         # make a copy of the values dictionary - we'll modify the contents
@@ -1642,7 +1750,7 @@
         # for the Database layer to do its stuff
         multilink_changes = {}
 
-        for propname, value in propvalues.items():
+        for propname, value in list(propvalues.items()):
             # check to make sure we're not duplicating an existing key
             if propname == self.key and node[propname] != value:
                 try:
@@ -1650,7 +1758,7 @@
                 except KeyError:
                     pass
                 else:
-                    raise ValueError, 'node with key "%s" exists'%value
+                    raise ValueError('node with key "%s" exists'%value)
 
             # this will raise the KeyError if the property isn't valid
             # ... we don't use getprops() here because we only care about
@@ -1658,8 +1766,8 @@
             try:
                 prop = self.properties[propname]
             except KeyError:
-                raise KeyError, '"%s" has no property named "%s"'%(
-                    self.classname, propname)
+                raise KeyError('"%s" has no property named "%s"'%(
+                    self.classname, propname))
 
             # if the value's the same as the existing value, no sense in
             # doing anything
@@ -1674,18 +1782,19 @@
                 link_class = prop.classname
                 # if it isn't a number, it's a key
                 if value is not None and not isinstance(value, type('')):
-                    raise ValueError, 'property "%s" link value be a string'%(
-                        propname)
+                    raise ValueError('property "%s" link value be a string'%(
+                        propname))
                 if isinstance(value, type('')) and not num_re.match(value):
                     try:
                         value = self.db.classes[link_class].lookup(value)
                     except (TypeError, KeyError):
-                        raise IndexError, 'new property "%s": %s not a %s'%(
-                            propname, value, prop.classname)
+                        raise IndexError('new property "%s": %s not a %s'%(
+                            propname, value, prop.classname))
 
                 if (value is not None and
                         not self.db.getclass(link_class).hasnode(value)):
-                    raise IndexError, '%s has no node %s'%(link_class, value)
+                    raise IndexError('%s has no node %s'%(link_class,
+                        value))
 
                 if self.do_journal and prop.do_journal:
                     # register the unlink with the old linked node
@@ -1702,22 +1811,22 @@
                 if value is None:
                     value = []
                 if not hasattr(value, '__iter__'):
-                    raise TypeError, 'new property "%s" not an iterable of'\
-                        ' ids'%propname
+                    raise TypeError('new property "%s" not an iterable of'
+                        ' ids'%propname)
                 link_class = self.properties[propname].classname
                 l = []
                 for entry in value:
                     # if it isn't a number, it's a key
                     if type(entry) != type(''):
-                        raise ValueError, 'new property "%s" link value ' \
-                            'must be a string'%propname
+                        raise ValueError('new property "%s" link value '
+                            'must be a string'%propname)
                     if not num_re.match(entry):
                         try:
                             entry = self.db.classes[link_class].lookup(entry)
                         except (TypeError, KeyError):
-                            raise IndexError, 'new property "%s": %s not a %s'%(
+                            raise IndexError('new property "%s": %s not a %s'%(
                                 propname, entry,
-                                self.properties[propname].classname)
+                                self.properties[propname].classname))
                     l.append(entry)
                 value = l
                 propvalues[propname] = value
@@ -1727,7 +1836,7 @@
                 remove = []
 
                 # handle removals
-                if node.has_key(propname):
+                if propname in node:
                     l = node[propname]
                 else:
                     l = []
@@ -1753,7 +1862,8 @@
                     # result in a SQL query, it is more efficient to
                     # avoid the check if possible.
                     if not self.db.getclass(link_class).hasnode(id):
-                        raise IndexError, '%s has no node %s'%(link_class, id)
+                        raise IndexError('%s has no node %s'%(link_class,
+                            id))
                     # register the link with the newly linked node
                     if self.do_journal and self.properties[propname].do_journal:
                         self.db.addjournal(link_class, id, 'link',
@@ -1773,7 +1883,7 @@
 
             elif isinstance(prop, String):
                 if value is not None and type(value) != type('') and type(value) != type(u''):
-                    raise TypeError, 'new property "%s" not a string'%propname
+                    raise TypeError('new property "%s" not a string'%propname)
                 if prop.indexme:
                     if value is None: value = ''
                     self.db.indexer.add_text((self.classname, nodeid, propname),
@@ -1781,31 +1891,33 @@
 
             elif isinstance(prop, Password):
                 if not isinstance(value, password.Password):
-                    raise TypeError, 'new property "%s" not a Password'%propname
+                    raise TypeError('new property "%s" not a Password'%propname)
                 propvalues[propname] = value
+                journalvalues[propname] = \
+                    current and password.JournalPassword(current)
 
             elif value is not None and isinstance(prop, Date):
                 if not isinstance(value, date.Date):
-                    raise TypeError, 'new property "%s" not a Date'% propname
+                    raise TypeError('new property "%s" not a Date'% propname)
                 propvalues[propname] = value
 
             elif value is not None and isinstance(prop, Interval):
                 if not isinstance(value, date.Interval):
-                    raise TypeError, 'new property "%s" not an '\
-                        'Interval'%propname
+                    raise TypeError('new property "%s" not an '
+                        'Interval'%propname)
                 propvalues[propname] = value
 
             elif value is not None and isinstance(prop, Number):
                 try:
                     float(value)
                 except ValueError:
-                    raise TypeError, 'new property "%s" not numeric'%propname
+                    raise TypeError('new property "%s" not numeric'%propname)
 
             elif value is not None and isinstance(prop, Boolean):
                 try:
                     int(value)
                 except ValueError:
-                    raise TypeError, 'new property "%s" not boolean'%propname
+                    raise TypeError('new property "%s" not boolean'%propname)
 
         # nothing to do?
         if not propvalues:
@@ -1838,7 +1950,7 @@
         methods, and other nodes may reuse the values of their key properties.
         """
         if self.db.journaltag is None:
-            raise DatabaseError, _('Database open read-only')
+            raise DatabaseError(_('Database open read-only'))
 
         self.fireAuditors('retire', nodeid, None)
 
@@ -1858,7 +1970,7 @@
         Make node available for all operations like it was before retirement.
         """
         if self.db.journaltag is None:
-            raise DatabaseError, _('Database open read-only')
+            raise DatabaseError(_('Database open read-only'))
 
         node = self.db.getnode(self.classname, nodeid)
         # check if key property was overrided
@@ -1868,8 +1980,8 @@
         except KeyError:
             pass
         else:
-            raise KeyError, "Key property (%s) of retired node clashes with \
-                existing one (%s)" % (key, node[key])
+            raise KeyError("Key property (%s) of retired node clashes "
+                "with existing one (%s)" % (key, node[key]))
 
         self.fireAuditors('restore', nodeid, None)
         # use the arg for __retired__ to cope with any odd database type
@@ -1911,26 +2023,9 @@
         if there are any references to the node.
         """
         if self.db.journaltag is None:
-            raise DatabaseError, _('Database open read-only')
+            raise DatabaseError(_('Database open read-only'))
         self.db.destroynode(self.classname, nodeid)
 
-    def history(self, nodeid):
-        """Retrieve the journal of edits on a particular node.
-
-        'nodeid' must be the id of an existing node of this class or an
-        IndexError is raised.
-
-        The returned list contains tuples of the form
-
-            (nodeid, date, tag, action, params)
-
-        'date' is a Timestamp object specifying the time of the change and
-        'tag' is the journaltag specified when the database was opened.
-        """
-        if not self.do_journal:
-            raise ValueError, 'Journalling is disabled for this class'
-        return self.db.getjournal(self.classname, nodeid)
-
     # Locating nodes:
     def hasnode(self, nodeid):
         """Determine if the given nodeid actually exists
@@ -1946,7 +2041,7 @@
         """
         prop = self.getprops()[propname]
         if not isinstance(prop, String):
-            raise TypeError, 'key properties must be String'
+            raise TypeError('key properties must be String')
         self.key = propname
 
     def getkey(self):
@@ -1962,7 +2057,7 @@
         otherwise a KeyError is raised.
         """
         if not self.key:
-            raise TypeError, 'No key property set for class %s'%self.classname
+            raise TypeError('No key property set for class %s'%self.classname)
 
         # use the arg to handle any odd database type conversion (hello,
         # sqlite)
@@ -1973,8 +2068,8 @@
         # see if there was a result that's not retired
         row = self.db.sql_fetchone()
         if not row:
-            raise KeyError, 'No key (%s) value "%s" for "%s"'%(self.key,
-                keyvalue, self.classname)
+            raise KeyError('No key (%s) value "%s" for "%s"'%(self.key,
+                keyvalue, self.classname))
 
         # return the id
         # XXX numeric ids
@@ -2001,30 +2096,29 @@
 
         # validate the args
         props = self.getprops()
-        propspec = propspec.items()
-        for propname, nodeids in propspec:
+        for propname, nodeids in propspec.iteritems():
             # check the prop is OK
             prop = props[propname]
             if not isinstance(prop, Link) and not isinstance(prop, Multilink):
-                raise TypeError, "'%s' not a Link/Multilink property"%propname
+                raise TypeError("'%s' not a Link/Multilink property"%propname)
 
         # first, links
         a = self.db.arg
         allvalues = ()
         sql = []
         where = []
-        for prop, values in propspec:
+        for prop, values in propspec.iteritems():
             if not isinstance(props[prop], hyperdb.Link):
                 continue
             if type(values) is type({}) and len(values) == 1:
-                values = values.keys()[0]
+                values = list(values)[0]
             if type(values) is type(''):
                 allvalues += (values,)
                 where.append('_%s = %s'%(prop, a))
             elif values is None:
                 where.append('_%s is NULL'%prop)
             else:
-                values = values.keys()
+                values = list(values)
                 s = ''
                 if None in values:
                     values.remove(None)
@@ -2038,7 +2132,7 @@
                 and %s"""%(self.classname, a, ' and '.join(where)))
 
         # now multilinks
-        for prop, values in propspec:
+        for prop, values in propspec.iteritems():
             if not isinstance(props[prop], hyperdb.Multilink):
                 continue
             if not values:
@@ -2048,7 +2142,7 @@
                 allvalues += (values,)
                 s = a
             else:
-                allvalues += tuple(values.keys())
+                allvalues += tuple(values)
                 s = ','.join([a]*len(values))
             tn = '%s_%s'%(self.classname, prop)
             sql.append("""select id from _%s, %s where  __retired__=%s
@@ -2073,10 +2167,10 @@
         """
         where = []
         args = []
-        for propname in requirements.keys():
+        for propname in requirements:
             prop = self.properties[propname]
             if not isinstance(prop, String):
-                raise TypeError, "'%s' not a String property"%propname
+                raise TypeError("'%s' not a String property"%propname)
             where.append(propname)
             args.append(requirements[propname].lower())
 
@@ -2135,32 +2229,106 @@
     # The format parameter is replaced with the attribute.
     order_by_null_values = None
 
-    def filter(self, search_matches, filterspec, sort=[], group=[]):
-        """Return a list of the ids of the active nodes in this class that
-        match the 'filter' spec, sorted by the group spec and then the
-        sort spec
-
-        "filterspec" is {propname: value(s)}
-
-        "sort" and "group" are [(dir, prop), ...] where dir is '+', '-'
-        or None and prop is a prop name or None. Note that for
-        backward-compatibility reasons a single (dir, prop) tuple is
-        also allowed.
-
-        "search_matches" is a container type or None
+    def supports_subselects(self): 
+        '''Assuming DBs can do subselects, override if they cannot.
+	'''
+        return True
+
+    def _filter_multilink_expression_fallback(
+        self, classname, multilink_table, expr):
+        '''This is a fallback for databases that do not support
+           subselects.'''
+
+        is_valid = expr.evaluate
+
+        last_id, kws = None, []
+
+        ids = IdListOptimizer()
+        append = ids.append
+
+        # This join and the evaluation in program space
+        # can be expensive for larger databases!
+        # TODO: Find a faster way to collect the data needed
+        # to evaluate the expression.
+        # Moving the expression evaluation into the database
+        # would be nice but this is tricky: Think about the cases
+        # where the multilink table does not have join values
+        # needed in evaluation.
+
+        stmnt = "SELECT c.id, m.linkid FROM _%s c " \
+                "LEFT OUTER JOIN %s m " \
+                "ON c.id = m.nodeid ORDER BY c.id" % (
+                    classname, multilink_table)
+        self.db.sql(stmnt)
+
+        # collect all multilink items for a class item
+        for nid, kw in self.db.sql_fetchiter():
+            if nid != last_id:
+                if last_id is None:
+                    last_id = nid
+                else:
+                    # we have all multilink items -> evaluate!
+                    if is_valid(kws): append(last_id)
+                    last_id, kws = nid, []
+            if kw is not None:
+                kws.append(kw)
+
+        if last_id is not None and is_valid(kws): 
+            append(last_id)
+
+        # we have ids of the classname table
+        return ids.where("_%s.id" % classname, self.db.arg)
+
+    def _filter_multilink_expression(self, classname, multilink_table, v):
+        """ Filters out elements of the classname table that do not
+            match the given expression.
+            Returns tuple of 'WHERE' introns for the overall filter.
+        """
+        try:
+            opcodes = [int(x) for x in v]
+            if min(opcodes) >= -1: raise ValueError()
 
-        The filter must match all properties specificed. If the property
-        value to match is a list:
+            expr = compile_expression(opcodes)
 
-        1. String properties must match all elements in the list, and
-        2. Other properties must match any of the elements in the list.
+            if not self.supports_subselects():
+                # We heavily rely on subselects. If there is
+                # no decent support fall back to slower variant.
+                return self._filter_multilink_expression_fallback(
+                    classname, multilink_table, expr)
+
+            atom = \
+                "%s IN(SELECT linkid FROM %s WHERE nodeid=a.id)" % (
+                self.db.arg,
+                multilink_table)
+
+            intron = \
+                "_%(classname)s.id in (SELECT id " \
+                "FROM _%(classname)s AS a WHERE %(condition)s) " % {
+                    'classname' : classname,
+                    'condition' : expr.generate(lambda n: atom) }
+
+            values = []
+            def collect_values(n): values.append(n.x)
+            expr.visit(collect_values)
+
+            return intron, values
+        except:
+            # original behavior
+            where = "%s.linkid in (%s)" % (
+                multilink_table, ','.join([self.db.arg] * len(v)))
+            return where, v, True # True to indicate original
+
+    def _filter_sql (self, search_matches, filterspec, srt=[], grp=[], retr=0):
+        """ Compute the proptree and the SQL/ARGS for a filter.
+        For argument description see filter below.
+        We return a 3-tuple, the proptree, the sql and the sql-args
+        or None if no SQL is necessary.
+        The flag retr serves to retrieve *all* non-Multilink properties
+        (for filling the cache during a filter_iter)
         """
         # we can't match anything if search_matches is empty
         if not search_matches and search_matches is not None:
-            return []
-
-        if __debug__:
-            start_t = time.time()
+            return None
 
         icn = self.classname
 
@@ -2173,8 +2341,8 @@
 
         # figure the WHERE clause from the filterspec
         mlfilt = 0      # are we joining with Multilink tables?
-        sortattr = self._sortattr (group = group, sort = sort)
-        proptree = self._proptree(filterspec, sortattr)
+        sortattr = self._sortattr (group = grp, sort = srt)
+        proptree = self._proptree(filterspec, sortattr, retr)
         mlseen = 0
         for pt in reversed(proptree.sortattr):
             p = pt
@@ -2189,12 +2357,11 @@
                 pt.attr_sort_done = pt.tree_sort_done = True
         proptree.compute_sort_done()
 
-        ordercols = []
-        auxcols = {}
+        cols = ['_%s.id'%icn]
         mlsort = []
         rhsnum = 0
         for p in proptree:
-            oc = None
+            rc = ac = oc = None
             cn = p.classname
             ln = p.uniqname
             pln = p.parent.uniqname
@@ -2202,10 +2369,13 @@
             k = p.name
             v = p.val
             propclass = p.propclass
-            if p.sort_type > 0:
-                oc = ac = '_%s._%s'%(pln, k)
+            if p.parent == proptree and p.name == 'id' \
+                and 'retrieve' in p.need_for:
+                p.sql_idx = 0
+            if 'sort' in p.need_for or 'retrieve' in p.need_for:
+                rc = oc = ac = '_%s._%s'%(pln, k)
             if isinstance(propclass, Multilink):
-                if p.sort_type < 2:
+                if 'search' in p.need_for:
                     mlfilt = 1
                     tn = '%s_%s'%(pcn, k)
                     if v in ('-1', ['-1'], []):
@@ -2214,33 +2384,47 @@
                         where.append(self._subselect(pcn, tn))
                     else:
                         frum.append(tn)
-                        where.append('_%s.id=%s.nodeid'%(pln,tn))
+                        gen_join = True
+
+                        if p.has_values and isinstance(v, type([])):
+                            result = self._filter_multilink_expression(pln, tn, v)
+                            # XXX: We don't need an id join if we used the filter
+                            gen_join = len(result) == 3
+
+                        if gen_join:
+                            where.append('_%s.id=%s.nodeid'%(pln,tn))
+
                         if p.children:
                             frum.append('_%s as _%s' % (cn, ln))
                             where.append('%s.linkid=_%s.id'%(tn, ln))
+
                         if p.has_values:
                             if isinstance(v, type([])):
-                                s = ','.join([a for x in v])
-                                where.append('%s.linkid in (%s)'%(tn, s))
-                                args = args + v
+                                where.append(result[0])
+                                args += result[1]
                             else:
                                 where.append('%s.linkid=%s'%(tn, a))
                                 args.append(v)
-                if p.sort_type > 0:
+                if 'sort' in p.need_for:
                     assert not p.attr_sort_done and not p.sort_ids_needed
             elif k == 'id':
-                if p.sort_type < 2:
+                if 'search' in p.need_for:
                     if isinstance(v, type([])):
+                        # If there are no permitted values, then the
+                        # where clause will always be false, and we
+                        # can optimize the query away.
+                        if not v:
+                            return []
                         s = ','.join([a for x in v])
                         where.append('_%s.%s in (%s)'%(pln, k, s))
                         args = args + v
                     else:
                         where.append('_%s.%s=%s'%(pln, k, a))
                         args.append(v)
-                if p.sort_type > 0:
-                    oc = ac = '_%s.id'%pln
+                if 'sort' in p.need_for or 'retrieve' in p.need_for:
+                    rc = oc = ac = '_%s.id'%pln
             elif isinstance(propclass, String):
-                if p.sort_type < 2:
+                if 'search' in p.need_for:
                     if not isinstance(v, type([])):
                         v = [v]
 
@@ -2254,12 +2438,12 @@
                         +' and '.join(["_%s._%s LIKE '%s'"%(pln, k, s) for s in v])
                         +')')
                     # note: args are embedded in the query string now
-                if p.sort_type > 0:
+                if 'sort' in p.need_for:
                     oc = ac = 'lower(_%s._%s)'%(pln, k)
             elif isinstance(propclass, Link):
-                if p.sort_type < 2:
+                if 'search' in p.need_for:
                     if p.children:
-                        if p.sort_type == 0:
+                        if 'sort' not in p.need_for:
                             frum.append('_%s as _%s' % (cn, ln))
                         where.append('_%s._%s=_%s.id'%(pln, k, ln))
                     if p.has_values:
@@ -2270,11 +2454,11 @@
                                     entry = None
                                 d[entry] = entry
                             l = []
-                            if d.has_key(None) or not d:
-                                if d.has_key(None): del d[None]
+                            if None in d or not d:
+                                if None in d: del d[None]
                                 l.append('_%s._%s is NULL'%(pln, k))
                             if d:
-                                v = d.keys()
+                                v = list(d)
                                 s = ','.join([a for x in v])
                                 l.append('(_%s._%s in (%s))'%(pln, k, s))
                                 args = args + v
@@ -2287,16 +2471,18 @@
                             else:
                                 where.append('_%s._%s=%s'%(pln, k, a))
                                 args.append(v)
-                if p.sort_type > 0:
+                if 'sort' in p.need_for:
                     lp = p.cls.labelprop()
                     oc = ac = '_%s._%s'%(pln, k)
                     if lp != 'id':
-                        if p.tree_sort_done and p.sort_type > 0:
+                        if p.tree_sort_done:
                             loj.append(
                                 'LEFT OUTER JOIN _%s as _%s on _%s._%s=_%s.id'%(
                                 cn, ln, pln, k, ln))
                         oc = '_%s._%s'%(ln, lp)
-            elif isinstance(propclass, Date) and p.sort_type < 2:
+                if 'retrieve' in p.need_for:
+                    rc = '_%s._%s'%(pln, k)
+            elif isinstance(propclass, Date) and 'search' in p.need_for:
                 dc = self.db.to_sql_value(hyperdb.Date)
                 if isinstance(v, type([])):
                     s = ','.join([a for x in v])
@@ -2317,7 +2503,7 @@
                         pass
             elif isinstance(propclass, Interval):
                 # filter/sort using the __<prop>_int__ column
-                if p.sort_type < 2:
+                if 'search' in p.need_for:
                     if isinstance(v, type([])):
                         s = ','.join([a for x in v])
                         where.append('_%s.__%s_int__ in (%s)'%(pln, k, s))
@@ -2335,9 +2521,29 @@
                         except ValueError:
                             # If range creation fails - ignore search parameter
                             pass
-                if p.sort_type > 0:
+                if 'sort' in p.need_for:
                     oc = ac = '_%s.__%s_int__'%(pln,k)
-            elif p.sort_type < 2:
+                if 'retrieve' in p.need_for:
+                    rc = '_%s._%s'%(pln,k)
+            elif isinstance(propclass, Boolean) and 'search' in p.need_for:
+                if type(v) == type(""):
+                    v = v.split(',')
+                if type(v) != type([]):
+                    v = [v]
+                bv = []
+                for val in v:
+                    if type(val) is type(''):
+                        bv.append(propclass.from_raw (val))
+                    else:
+                        bv.append(bool(val))
+                if len(bv) == 1:
+                    where.append('_%s._%s=%s'%(pln, k, a))
+                    args = args + bv
+                else:
+                    s = ','.join([a for x in v])
+                    where.append('_%s._%s in (%s)'%(pln, k, s))
+                    args = args + bv
+            elif 'search' in p.need_for:
                 if isinstance(v, type([])):
                     s = ','.join([a for x in v])
                     where.append('_%s._%s in (%s)'%(pln, k, s))
@@ -2347,18 +2553,28 @@
                     args.append(v)
             if oc:
                 if p.sort_ids_needed:
-                    auxcols[ac] = p
+                    if rc == ac:
+                        p.sql_idx = len(cols)
+                    p.auxcol = len(cols)
+                    cols.append(ac)
                 if p.tree_sort_done and p.sort_direction:
-                    # Don't select top-level id twice
-                    if p.name != 'id' or p.parent != proptree:
-                        ordercols.append(oc)
+                    # Don't select top-level id or multilink twice
+                    if (not p.sort_ids_needed or ac != oc) and (p.name != 'id'
+                        or p.parent != proptree):
+                        if rc == oc:
+                            p.sql_idx = len(cols)
+                        cols.append(oc)
                     desc = ['', ' desc'][p.sort_direction == '-']
                     # Some SQL dbs sort NULL values last -- we want them first.
                     if (self.order_by_null_values and p.name != 'id'):
                         nv = self.order_by_null_values % oc
-                        ordercols.append(nv)
+                        cols.append(nv)
                         p.orderby.append(nv + desc)
                     p.orderby.append(oc + desc)
+            if 'retrieve' in p.need_for and p.sql_idx is None:
+                assert(rc)
+                p.sql_idx = len(cols)
+                cols.append (rc)
 
         props = self.getprops()
 
@@ -2381,11 +2597,8 @@
         if mlfilt:
             # we're joining tables on the id, so we will get dupes if we
             # don't distinct()
-            cols = ['distinct(_%s.id)'%icn]
-        else:
-            cols = ['_%s.id'%icn]
-        if ordercols:
-            cols = cols + ordercols
+            cols[0] = 'distinct(_%s.id)'%icn
+
         order = []
         # keep correct sequence of order attributes.
         for sa in proptree.sortattr:
@@ -2396,21 +2609,50 @@
             order = ' order by %s'%(','.join(order))
         else:
             order = ''
-        for o, p in auxcols.iteritems ():
-            cols.append (o)
-            p.auxcol = len (cols) - 1
 
         cols = ','.join(cols)
         loj = ' '.join(loj)
         sql = 'select %s from %s %s %s%s'%(cols, frum, loj, where, order)
         args = tuple(args)
         __traceback_info__ = (sql, args)
+        return proptree, sql, args
+
+    def filter(self, search_matches, filterspec, sort=[], group=[]):
+        """Return a list of the ids of the active nodes in this class that
+        match the 'filter' spec, sorted by the group spec and then the
+        sort spec
+
+        "filterspec" is {propname: value(s)}
+
+        "sort" and "group" are [(dir, prop), ...] where dir is '+', '-'
+        or None and prop is a prop name or None. Note that for
+        backward-compatibility reasons a single (dir, prop) tuple is
+        also allowed.
+
+        "search_matches" is a container type or None
+
+        The filter must match all properties specified. If the property
+        value to match is a list:
+
+        1. String properties must match all elements in the list, and
+        2. Other properties must match any of the elements in the list.
+        """
+        if __debug__:
+            start_t = time.time()
+
+        sq = self._filter_sql (search_matches, filterspec, sort, group)
+        # nothing to match?
+        if sq is None:
+            return []
+        proptree, sql, args = sq
+
         self.db.sql(sql, args)
         l = self.db.sql_fetchall()
 
         # Compute values needed for sorting in proptree.sort
-        for p in auxcols.itervalues():
-            p.sort_ids = p.sort_result = [row[p.auxcol] for row in l]
+        for p in proptree:
+            if hasattr(p, 'auxcol'):
+                p.sort_ids = p.sort_result = [row[p.auxcol] for row in l]
         # return the IDs (the first column)
         # XXX numeric ids
         l = [str(row[0]) for row in l]
@@ -2420,6 +2662,53 @@
             self.db.stats['filtering'] += (time.time() - start_t)
         return l
 
+    def filter_iter(self, search_matches, filterspec, sort=[], group=[]):
+        """Iterator similar to filter above with same args.
+        Limitation: We don't sort on multilinks.
+        This uses an optimisation: We put all nodes that are in the
+        current row into the node cache. Then we return the node id.
+        That way a fetch of a node won't create another sql-fetch (with
+        a join) from the database because the nodes are already in the
+        cache. We're using our own temporary cursor.
+        """
+        sq = self._filter_sql(search_matches, filterspec, sort, group, retr=1)
+        # nothing to match?
+        if sq is None:
+            return
+        proptree, sql, args = sq
+        cursor = self.db.conn.cursor()
+        self.db.sql(sql, args, cursor)
+        classes = {}
+        for p in proptree:
+            if 'retrieve' in p.need_for:
+                cn = p.parent.classname
+                ptid = p.parent.id # not the nodeid!
+                key = (cn, ptid)
+                if key not in classes:
+                    classes[key] = {}
+                name = p.name
+                assert (name)
+                classes[key][name] = p
+                p.to_hyperdb = self.db.to_hyperdb_value(p.propclass.__class__)
+        while True:
+            row = cursor.fetchone()
+            if not row: break
+            # populate cache with current items
+            for (classname, ptid), pt in classes.iteritems():
+                nodeid = str(row[pt['id'].sql_idx])
+                key = (classname, nodeid)
+                if key in self.db.cache:
+                    self.db._cache_refresh(key)
+                    continue
+                node = {}
+                for propname, p in pt.iteritems():
+                    value = row[p.sql_idx]
+                    if value is not None:
+                        value = p.to_hyperdb(value)
+                    node[propname] = value
+                self.db._cache_save(key, node)
+            yield str(row[0])
+
     def filter_sql(self, sql):
         """Return a list of the ids of the items in this class that match
         the SQL provided. The SQL is a complete "select" statement.
@@ -2471,16 +2760,16 @@
         may collide with the names of existing properties, or a ValueError
         is raised before any properties have been added.
         """
-        for key in properties.keys():
-            if self.properties.has_key(key):
-                raise ValueError, key
+        for key in properties:
+            if key in self.properties:
+                raise ValueError(key)
         self.properties.update(properties)
 
     def index(self, nodeid):
         """Add (or refresh) the node to search indexes
         """
         # find all the String properties that have indexme
-        for prop, propclass in self.getprops().items():
+        for prop, propclass in self.getprops().iteritems():
             if isinstance(propclass, String) and propclass.indexme:
                 self.db.indexer.add_text((self.classname, nodeid, prop),
                     str(self.get(nodeid, prop)))
@@ -2519,7 +2808,7 @@
             Return the nodeid of the node imported.
         """
         if self.db.journaltag is None:
-            raise DatabaseError, _('Database open read-only')
+            raise DatabaseError(_('Database open read-only'))
         properties = self.getprops()
 
         # make the new node's property map
@@ -2557,16 +2846,13 @@
             elif isinstance(prop, hyperdb.Interval):
                 value = date.Interval(value)
             elif isinstance(prop, hyperdb.Password):
-                pwd = password.Password()
-                pwd.unpack(value)
-                value = pwd
+                value = password.Password(encrypted=value)
             elif isinstance(prop, String):
                 if isinstance(value, unicode):
                     value = value.encode('utf8')
                 if not isinstance(value, str):
-                    raise TypeError, \
-                        'new property "%(propname)s" not a string: %(value)r' \
-                        % locals()
+                    raise TypeError('new property "%(propname)s" not a '
+                        'string: %(value)r'%locals())
                 if prop.indexme:
                     self.db.indexer.add_text((self.classname, newid, propname),
                         value)
@@ -2606,8 +2892,8 @@
                 date = date.get_tuple()
                 if action == 'set':
                     export_data = {}
-                    for propname, value in params.items():
-                        if not properties.has_key(propname):
+                    for propname, value in params.iteritems():
+                        if propname not in properties:
                             # property no longer in the schema
                             continue
 
@@ -2627,41 +2913,9 @@
                     # old tracker with data stored in the create!
                     params = {}
                 l = [nodeid, date, user, action, params]
-                r.append(map(repr, l))
+                r.append(list(map(repr, l)))
         return r
 
-    def import_journals(self, entries):
-        """Import a class's journal.
-
-        Uses setjournal() to set the journal for each item."""
-        properties = self.getprops()
-        d = {}
-        for l in entries:
-            l = map(eval, l)
-            nodeid, jdate, user, action, params = l
-            r = d.setdefault(nodeid, [])
-            if action == 'set':
-                for propname, value in params.items():
-                    prop = properties[propname]
-                    if value is None:
-                        pass
-                    elif isinstance(prop, Date):
-                        value = date.Date(value)
-                    elif isinstance(prop, Interval):
-                        value = date.Interval(value)
-                    elif isinstance(prop, Password):
-                        pwd = password.Password()
-                        pwd.unpack(value)
-                        value = pwd
-                    params[propname] = value
-            elif action == 'create' and params:
-                # old tracker with data stored in the create!
-                params = {}
-            r.append((nodeid, date.Date(jdate), user, action, params))
-
-        for nodeid, l in d.items():
-            self.db.setjournal(self.classname, nodeid, l)
-
 class FileClass(hyperdb.FileClass, Class):
     """This class defines a large chunk of data. To support this, it has a
        mandatory String property "content" which is typically saved off
@@ -2675,9 +2929,9 @@
         """The newly-created class automatically includes the "content"
         and "type" properties.
         """
-        if not properties.has_key('content'):
+        if 'content' not in properties:
             properties['content'] = hyperdb.String(indexme='yes')
-        if not properties.has_key('type'):
+        if 'type' not in properties:
             properties['type'] = hyperdb.String()
         Class.__init__(self, db, classname, **properties)
 
@@ -2720,7 +2974,7 @@
         if propname == 'content':
             try:
                 return self.db.getfile(self.classname, nodeid, None)
-            except IOError, (strerror):
+            except IOError, strerror:
                 # BUG: by catching this we donot see an error in the log.
                 return 'ERROR reading file: %s%s\n%s\n%s'%(
                         self.classname, nodeid, poss_msg, strerror)
@@ -2737,7 +2991,7 @@
 
         # now remove the content property so it's not stored in the db
         content = None
-        if propvalues.has_key('content'):
+        if 'content' in propvalues:
             content = propvalues['content']
             del propvalues['content']
 
@@ -2764,7 +3018,7 @@
         Use the content-type property for the content property.
         """
         # find all the String properties that have indexme
-        for prop, propclass in self.getprops().items():
+        for prop, propclass in self.getprops().iteritems():
             if prop == 'content' and propclass.indexme:
                 mime_type = self.get(nodeid, 'type', self.default_mime_type)
                 self.db.indexer.add_text((self.classname, nodeid, 'content'),
@@ -2788,17 +3042,17 @@
         "creation", "creator", "activity" or "actor" property, a ValueError
         is raised.
         """
-        if not properties.has_key('title'):
+        if 'title' not in properties:
             properties['title'] = hyperdb.String(indexme='yes')
-        if not properties.has_key('messages'):
+        if 'messages' not in properties:
             properties['messages'] = hyperdb.Multilink("msg")
-        if not properties.has_key('files'):
+        if 'files' not in properties:
             properties['files'] = hyperdb.Multilink("file")
-        if not properties.has_key('nosy'):
+        if 'nosy' not in properties:
             # note: journalling is turned off as it really just wastes
             # space. this behaviour may be overridden in an instance
             properties['nosy'] = hyperdb.Multilink("user", do_journal="no")
-        if not properties.has_key('superseder'):
+        if 'superseder' not in properties:
             properties['superseder'] = hyperdb.Multilink(classname)
         Class.__init__(self, db, classname, **properties)
 

Modified: tracker/roundup-src/roundup/backends/sessions_dbm.py
==============================================================================
--- tracker/roundup-src/roundup/backends/sessions_dbm.py	(original)
+++ tracker/roundup-src/roundup/backends/sessions_dbm.py	Thu Aug  4 15:46:52 2011
@@ -7,9 +7,11 @@
 """
 __docformat__ = 'restructuredtext'
 
-import anydbm, whichdb, os, marshal, time
+import os, marshal, time
+
 from roundup import hyperdb
 from roundup.i18n import _
+from roundup.anypy.dbm_ import anydbm, whichdb, key_in
 
 class BasicDatabase:
     ''' Provide a nice encapsulation of an anydbm store.
@@ -26,7 +28,7 @@
     def exists(self, infoid):
         db = self.opendb('c')
         try:
-            return db.has_key(infoid)
+            return key_in(db, infoid)
         finally:
             db.close()
 
@@ -44,10 +46,10 @@
         '''
         db_type = ''
         if os.path.exists(path):
-            db_type = whichdb.whichdb(path)
+            db_type = whichdb(path)
             if not db_type:
-                raise hyperdb.DatabaseError, \
-                    _("Couldn't identify database type")
+                raise hyperdb.DatabaseError(
+                    _("Couldn't identify database type"))
         elif os.path.exists(path+'.db'):
             # if the path ends in '.db', it's a dbm database, whether
             # anydbm says it's dbhash or not!
@@ -58,12 +60,12 @@
     def get(self, infoid, value, default=_marker):
         db = self.opendb('c')
         try:
-            if db.has_key(infoid):
+            if key_in(db, infoid):
                 values = marshal.loads(db[infoid])
             else:
                 if default != self._marker:
                     return default
-                raise KeyError, 'No such %s "%s"'%(self.name, infoid)
+                raise KeyError('No such %s "%s"'%(self.name, infoid))
             return values.get(value, None)
         finally:
             db.close()
@@ -76,14 +78,14 @@
                 del d['__timestamp']
                 return d
             except KeyError:
-                raise KeyError, 'No such %s "%s"'%(self.name, infoid)
+                raise KeyError('No such %s "%s"'%(self.name, infoid))
         finally:
             db.close()
 
     def set(self, infoid, **newvalues):
         db = self.opendb('c')
         try:
-            if db.has_key(infoid):
+            if key_in(db, infoid):
                 values = marshal.loads(db[infoid])
             else:
                 values = {'__timestamp': time.time()}
@@ -95,14 +97,14 @@
     def list(self):
         db = self.opendb('r')
         try:
-            return db.keys()
+            return list(db.keys())
         finally:
             db.close()
 
     def destroy(self, infoid):
         db = self.opendb('c')
         try:
-            if db.has_key(infoid):
+            if key_in(db, infoid):
                 del db[infoid]
         finally:
             db.close()

Modified: tracker/roundup-src/roundup/backends/sessions_rdbms.py
==============================================================================
--- tracker/roundup-src/roundup/backends/sessions_rdbms.py	(original)
+++ tracker/roundup-src/roundup/backends/sessions_rdbms.py	Thu Aug  4 15:46:52 2011
@@ -36,7 +36,7 @@
         if not res:
             if default != self._marker:
                 return default
-            raise KeyError, 'No such %s "%s"'%(self.name, infoid)
+            raise KeyError('No such %s "%s"'%(self.name, infoid))
         values = eval(res[0])
         return values.get(value, None)
 
@@ -46,7 +46,7 @@
             n, n, self.db.arg), (infoid,))
         res = self.cursor.fetchone()
         if not res:
-            raise KeyError, 'No such %s "%s"'%(self.name, infoid)
+            raise KeyError('No such %s "%s"'%(self.name, infoid))
         return eval(res[0])
 
     def set(self, infoid, **newvalues):
@@ -72,6 +72,12 @@
             args = (infoid, time.time(), repr(values))
         c.execute(sql, args)
 
+    def list(self):
+        c = self.cursor
+        n = self.name
+        c.execute('select %s_key from %ss'%(n, n))
+        return [res[0] for res in c.fetchall()]
+
     def destroy(self, infoid):
         self.cursor.execute('delete from %ss where %s_key=%s'%(self.name,
             self.name, self.db.arg), (infoid,))

Added: tracker/roundup-src/roundup/cgi/KeywordsExpr.py
==============================================================================
--- (empty file)
+++ tracker/roundup-src/roundup/cgi/KeywordsExpr.py	Thu Aug  4 15:46:52 2011
@@ -0,0 +1,273 @@
+# This module is free software, you may redistribute it
+# and/or modify under the same terms as Python.
+
+WINDOW_CONTENT = '''\
+<h3>Keyword Expression Editor:</h3>
+<hr/>
+<div id="content"></div>
+<script type="text/javascript">
+<!--
+
+var NOT_OP = "-2";
+var AND_OP = "-3";
+var OR_OP  = "-4";
+
+var original = "%(original)s";
+var current = original;
+var undo = [];
+
+var KEYWORDS = [
+    %(keywords)s
+];
+
+function find_keyword(x) {
+    for (var i = 0; i < KEYWORDS.length; ++i) {
+        if (KEYWORDS[i][0] == x) {
+            return KEYWORDS[i][1];
+        }
+    }
+    return "unknown";
+}
+
+function Equals(x) {
+    this.x = x;
+    this.brackets = false;
+
+    this.infix = function() {
+        return find_keyword(this.x);
+    }
+
+    this.postfix = function() {
+        return this.x;
+    }
+}
+
+function Not(x) {
+    this.x = x;
+    this.brackets = false;
+
+    this.infix = function() {
+        return this.x.brackets 
+            ? "NOT(" + this.x.infix() + ")"
+            : "NOT " + this.x.infix();
+    }
+
+    this.postfix = function() {
+        return this.x.postfix() + "," + NOT_OP;
+    }
+}
+
+function And(x, y) {
+    this.x = x;
+    this.y = y;
+    this.brackets = true;
+
+    this.infix = function() {
+        var a = this.x.brackets ? "(" + this.x.infix() + ")" : this.x.infix();
+        var b = this.y.brackets ? "(" + this.y.infix() + ")" : this.y.infix();
+        return a + " AND " + b;
+    }
+    this.postfix = function() {
+        return this.x.postfix() + "," + this.y.postfix() + "," + AND_OP;
+    }
+}
+
+function Or(x, y) {
+    this.x = x;
+    this.y = y;
+    this.brackets = true;
+
+    this.infix = function() {
+        var a = this.x.brackets ? "(" + this.x.infix() + ")" : this.x.infix();
+        var b = this.y.brackets ? "(" + this.y.infix() + ")" : this.y.infix();
+        return a + " OR " + b;
+    }
+
+    this.postfix = function() {
+        return this.x.postfix() + "," + this.y.postfix() + "," + OR_OP;
+    }
+}
+
+function trim(s) {
+    return s.replace (/^\s+/, '').replace(/\s+$/, '');
+}
+
+function parse(s) {
+    var operators = s.split(",");
+    var stack = [];
+    for (var i = 0; i < operators.length; ++i) {
+        var operator = trim(operators[i]);
+        if (operator == "") continue;
+        if (operator == NOT_OP) {
+            stack.push(new Not(stack.pop()));
+        }
+        else if (operator == AND_OP) {
+            var a = stack.pop();
+            var b = stack.pop();
+            stack.push(new And(b, a));
+        }
+        else if (operator == OR_OP) {
+            var a = stack.pop();
+            var b = stack.pop();
+            stack.push(new Or(b, a));
+        }
+        else {
+            stack.push(new Equals(operator));
+        }
+    }
+    return stack.length > 0 ? stack.pop() : null;
+}
+
+function render_select(handler) {
+    var out = '<select name="keyword" id="keyword"';
+    if (handler != null) {
+        out += ' onchange="' + handler + '"';
+    }
+    out += '>';
+    out += '<option value="-1"><\/option>';
+    for (var i = 0; i < KEYWORDS.length; ++i) {
+        out += '<option value="' + KEYWORDS[i][0] + 
+               '">' + KEYWORDS[i][1] + "<\/option>";
+    }
+    out += '<\/select>';
+    return out;
+}
+
+function first_select() {
+    var value = document.getElementById("keyword").value;
+    current = value;
+    set_content();
+}
+
+function not_clicked() {
+    var expr = parse(current);
+    if (expr == null) return;
+    undo.push(current);
+    current = expr instanceof Not
+        ? expr.x.postfix()
+        : new Not(expr).postfix();
+    set_content();
+}
+
+function not_b_wrap(expr) {
+    var value = document.getElementById("not_b").checked;
+    return value ? new Not(expr) : expr;
+}
+
+function and_clicked() {
+    var expr = parse(current);
+    if (expr == null) return;
+    var value = document.getElementById("keyword").value;
+    if (value == "-1") return;
+    undo.push(current);
+    current = new And(expr, not_b_wrap(new Equals(value))).postfix();
+    set_content();
+}
+
+function or_clicked() {
+    var expr = parse(current);
+    if (expr == null) return;
+    var value = document.getElementById("keyword").value;
+    if (value == "-1") return;
+    undo.push(current);
+    current = new Or(expr, not_b_wrap(new Equals(value))).postfix();
+    set_content();
+}
+
+function undo_clicked() {
+    current = undo.length > 0 
+        ? undo.pop()
+        : original;
+    set_content();
+}
+
+function enable_and_or() {
+    var value = document.getElementById("keyword").value;
+    value = value == "-1";
+    document.getElementById("and").disabled = value;
+    document.getElementById("or").disabled = value;
+    document.getElementById("not_b").disabled = value;
+}
+
+function create() {
+    var expr = parse(current);
+    var out = "";
+    if (expr == null) {
+        out += "Keyword: ";
+        out += render_select("first_select();");
+    }
+    else {
+        out += '<table><tr>'
+        out += '<td><input type="button" name="not" onclick="not_clicked();" value="NOT"\/><\/td>';
+        out += "<td><tt><strong>" + expr.infix() + "<\/strong><\/tt><\/td>";
+        out += '<td><table>';
+        out += '<tr><td><input type="button" id="and" name="and" onclick="and_clicked();"'
+            +  ' value="AND" disabled="disabled"\/><\/td><\/tr>';
+        out += '<tr><td><input type="button" id="or" name="or" onclick="or_clicked();"'
+            +  ' value="OR" disabled="disabled"\/><\/td><\/tr>';
+        out += '<\/table><\/td>';
+        out += '<td><label for="not_b">NOT<\/label><br/>'
+            +  '<input type="checkbox" name="not_b" id="not_b" disabled="disabled"\/><\/td>';
+        out += '<td>' + render_select("enable_and_or();") + '<\/td>';
+        out += '<\/tr><\/table>'
+    }
+    out += '<hr\/>';
+    if (undo.length > 0 || (undo.length == 0 && current != original)) {
+        out += '<input type="button" onclick="undo_clicked();" value="Undo"\/>';
+    }
+    out += '<input type="button" onclick="modify_main();" value="Apply"\/>'
+        +  '<input type="button" onclick="window.close();" value="Close Window"\/>';
+    return out;
+}
+
+function main_content() {
+    var out = '';
+    out += '<input type="hidden" name="%(prop)s" value="' + current + '"\/>';
+    out += parse(current).infix();
+    return out;
+}
+
+function modify_main() {
+    main = window.opener.document.getElementById("keywords_%(prop)s");
+    main.innerHTML = main_content();
+}
+
+function set_content() {
+    document.getElementById("content").innerHTML = create();
+}
+
+set_content();
+//-->
+</script>
+'''
+
+def list_nodes(request):
+    prop = request.form.getfirst("property")
+    cls = request.client.db.getclass(prop)
+    items = []
+    for nodeid in cls.getnodeids():
+        l = cls.getnode(nodeid).items()
+        l = dict([x for x in l if len(x) == 2])
+        try:
+            items.append((l['id'], l['name']))
+        except KeyError:
+            pass
+    items.sort(key=lambda x: int(x[0]))
+    return items
+
+def items_to_keywords(items):
+    return ',\n    '.join(['["%s", "%s"]' % x for x in items])
+   
+
+def render_keywords_expression_editor(request):
+    prop = request.form.getfirst("property")
+
+    window_content = WINDOW_CONTENT % {
+        'prop'    : prop,
+        'keywords': items_to_keywords(list_nodes(request)),
+        'original': ''
+    }
+
+    return window_content
+
+# vim: set et sts=4 sw=4 :

Modified: tracker/roundup-src/roundup/cgi/accept_language.py
==============================================================================
--- tracker/roundup-src/roundup/cgi/accept_language.py	(original)
+++ tracker/roundup-src/roundup/cgi/accept_language.py	Thu Aug  4 15:46:52 2011
@@ -35,7 +35,7 @@
 # both
 lre   = re.compile(nqlre + "|" + qlre)
 
-ascii = ''.join([chr(x) for x in xrange(256)])
+ascii = ''.join([chr(x) for x in range(256)])
 whitespace = ' \t\n\r\v\f'
 
 def parse(language_header):

Modified: tracker/roundup-src/roundup/cgi/actions.py
==============================================================================
--- tracker/roundup-src/roundup/cgi/actions.py	(original)
+++ tracker/roundup-src/roundup/cgi/actions.py	Thu Aug  4 15:46:52 2011
@@ -1,4 +1,4 @@
-import re, cgi, StringIO, urllib, time, random, csv, codecs
+import re, cgi, time, random, csv, codecs
 
 from roundup import hyperdb, token, date, password
 from roundup.actions import Action as BaseAction
@@ -6,6 +6,7 @@
 import roundup.exceptions
 from roundup.cgi import exceptions, templating
 from roundup.mailgw import uidFromAddress
+from roundup.anypy import io_, urllib_
 
 __all__ = ['Action', 'ShowAction', 'RetireAction', 'SearchAction',
            'EditCSVAction', 'EditItemAction', 'PassResetAction',
@@ -53,9 +54,9 @@
         if (self.permissionType and
                 not self.hasPermission(self.permissionType)):
             info = {'action': self.name, 'classname': self.classname}
-            raise exceptions.Unauthorised, self._(
+            raise exceptions.Unauthorised(self._(
                 'You do not have permission to '
-                '%(action)s the %(classname)s class.')%info
+                '%(action)s the %(classname)s class.')%info)
 
     _marker = []
     def hasPermission(self, permission, classname=_marker, itemid=None, property=None):
@@ -79,23 +80,23 @@
     def handle(self):
         """Show a node of a particular class/id."""
         t = n = ''
-        for key in self.form.keys():
+        for key in self.form:
             if self.typere.match(key):
                 t = self.form[key].value.strip()
             elif self.numre.match(key):
                 n = self.form[key].value.strip()
         if not t:
-            raise ValueError, self._('No type specified')
+            raise ValueError(self._('No type specified'))
         if not n:
-            raise exceptions.SeriousError, self._('No ID entered')
+            raise exceptions.SeriousError(self._('No ID entered'))
         try:
             int(n)
         except ValueError:
             d = {'input': n, 'classname': t}
-            raise exceptions.SeriousError, self._(
-                '"%(input)s" is not an ID (%(classname)s ID required)')%d
+            raise exceptions.SeriousError(self._(
+                '"%(input)s" is not an ID (%(classname)s ID required)')%d)
         url = '%s%s%s'%(self.base, t, n)
-        raise exceptions.Redirect, url
+        raise exceptions.Redirect(url)
 
 class RetireAction(Action):
     name = 'retire'
@@ -116,15 +117,15 @@
         # make sure we don't try to retire admin or anonymous
         if self.classname == 'user' and \
                 self.db.user.get(itemid, 'username') in ('admin', 'anonymous'):
-            raise ValueError, self._(
-                'You may not retire the admin or anonymous user')
+            raise ValueError(self._(
+                'You may not retire the admin or anonymous user'))
 
         # check permission
         if not self.hasPermission('Retire', classname=self.classname,
                 itemid=itemid):
-            raise exceptions.Unauthorised, self._(
+            raise exceptions.Unauthorised(self._(
                 'You do not have permission to retire %(class)s'
-            ) % {'class': self.classname}
+            ) % {'class': self.classname})
 
         # do the retire
         self.db.getclass(self.classname).retire(itemid)
@@ -171,14 +172,14 @@
                 try:
                     qid = self.db.query.lookup(old_queryname)
                     if not self.hasPermission('Edit', 'query', itemid=qid):
-                        raise exceptions.Unauthorised, self._(
-                            "You do not have permission to edit queries")
+                        raise exceptions.Unauthorised(self._(
+                            "You do not have permission to edit queries"))
                     self.db.query.set(qid, klass=self.classname, url=url)
                 except KeyError:
                     # create a query
                     if not self.hasPermission('Create', 'query'):
-                        raise exceptions.Unauthorised, self._(
-                            "You do not have permission to store queries")
+                        raise exceptions.Unauthorised(self._(
+                            "You do not have permission to store queries"))
                     qid = self.db.query.create(name=queryname,
                         klass=self.classname, url=url)
             else:
@@ -199,15 +200,15 @@
                         if old_queryname != self.db.query.get(qid, 'name'):
                             continue
                         if not self.hasPermission('Edit', 'query', itemid=qid):
-                            raise exceptions.Unauthorised, self._(
-                            "You do not have permission to edit queries")
+                            raise exceptions.Unauthorised(self._(
+                            "You do not have permission to edit queries"))
                         self.db.query.set(qid, klass=self.classname,
                             url=url, name=queryname)
                 else:
                     # create a query
                     if not self.hasPermission('Create', 'query'):
-                        raise exceptions.Unauthorised, self._(
-                            "You do not have permission to store queries")
+                        raise exceptions.Unauthorised(self._(
+                            "You do not have permission to store queries"))
                     qid = self.db.query.create(name=queryname,
                         klass=self.classname, url=url, private_for=uid)
 
@@ -223,7 +224,7 @@
     def fakeFilterVars(self):
         """Add a faked :filter form variable for each filtering prop."""
         cls = self.db.classes[self.classname]
-        for key in self.form.keys():
+        for key in self.form:
             prop = cls.get_transitive_prop(key)
             if not prop:
                 continue
@@ -269,7 +270,7 @@
 
     def getFromForm(self, name):
         for key in ('@' + name, ':' + name):
-            if self.form.has_key(key):
+            if key in self.form:
                 return self.form[key].value.strip()
         return ''
 
@@ -293,7 +294,7 @@
 
         # figure the properties list for the class
         cl = self.db.classes[self.classname]
-        props_without_id = cl.getprops(protected=0).keys()
+        props_without_id = list(cl.getprops(protected=0))
 
         # the incoming CSV data will always have the properties in colums
         # sorted and starting with the "id" column
@@ -301,7 +302,7 @@
         props = ['id'] + props_without_id
 
         # do the edit
-        rows = StringIO.StringIO(self.form['rows'].value)
+        rows = io_.BytesIO(self.form['rows'].value)
         reader = csv.reader(rows)
         found = {}
         line = 0
@@ -322,9 +323,14 @@
 
                 # check permission to create this item
                 if not self.hasPermission('Create', classname=self.classname):
-                    raise exceptions.Unauthorised, self._(
+                    raise exceptions.Unauthorised(self._(
                         'You do not have permission to create %(class)s'
-                    ) % {'class': self.classname}
+                    ) % {'class': self.classname})
+            elif cl.hasnode(itemid) and cl.is_retired(itemid):
+                # If a CSV line just mentions an id and the corresponding
+                # item is retired, then the item is restored.
+                cl.restore(itemid)
+                continue
             else:
                 exists = 1
 
@@ -340,9 +346,9 @@
                 # check permission to edit this property on this item
                 if exists and not self.hasPermission('Edit', itemid=itemid,
                         classname=self.classname, property=name):
-                    raise exceptions.Unauthorised, self._(
+                    raise exceptions.Unauthorised(self._(
                         'You do not have permission to edit %(class)s'
-                    ) % {'class': self.classname}
+                    ) % {'class': self.classname})
 
                 prop = cl.properties[name]
                 value = value.strip()
@@ -352,7 +358,7 @@
                     if isinstance(prop, hyperdb.Multilink):
                         value = value.split(':')
                     elif isinstance(prop, hyperdb.Password):
-                        value = password.Password(value)
+                        value = password.Password(value, config=self.db.config)
                     elif isinstance(prop, hyperdb.Interval):
                         value = date.Interval(value)
                     elif isinstance(prop, hyperdb.Date):
@@ -379,13 +385,13 @@
 
         # retire the removed entries
         for itemid in cl.list():
-            if not found.has_key(itemid):
+            if itemid not in found:
                 # check permission to retire this item
                 if not self.hasPermission('Retire', itemid=itemid,
                         classname=self.classname):
-                    raise exceptions.Unauthorised, self._(
+                    raise exceptions.Unauthorised(self._(
                         'You do not have permission to retire %(class)s'
-                    ) % {'class': self.classname}
+                    ) % {'class': self.classname})
                 cl.retire(itemid)
 
         # all OK
@@ -405,12 +411,12 @@
         links = {}
         for cn, nodeid, propname, vlist in all_links:
             numeric_id = int (nodeid or 0)
-            if not (numeric_id > 0 or all_props.has_key((cn, nodeid))):
+            if not (numeric_id > 0 or (cn, nodeid) in all_props):
                 # link item to link to doesn't (and won't) exist
                 continue
 
             for value in vlist:
-                if not all_props.has_key(value):
+                if value not in all_props:
                     # link item to link to doesn't (and won't) exist
                     continue
                 deps.setdefault((cn, nodeid), []).append(value)
@@ -422,19 +428,19 @@
         # loop detection
         change = 0
         while len(all_props) != len(done):
-            for needed in all_props.keys():
-                if done.has_key(needed):
+            for needed in all_props:
+                if needed in done:
                     continue
                 tlist = deps.get(needed, [])
                 for target in tlist:
-                    if not done.has_key(target):
+                    if target not in done:
                         break
                 else:
                     done[needed] = 1
                     order.append(needed)
                     change = 1
             if not change:
-                raise ValueError, 'linking must not loop!'
+                raise ValueError('linking must not loop!')
 
         # now, edit / create
         m = []
@@ -448,7 +454,7 @@
 
                     # and some nice feedback for the user
                     if props:
-                        info = ', '.join(map(self._, props.keys()))
+                        info = ', '.join(map(self._, props))
                         m.append(
                             self._('%(class)s %(id)s %(properties)s edited ok')
                             % {'class':cn, 'id':nodeid, 'properties':info})
@@ -469,18 +475,18 @@
                         % {'class':cn, 'id':newid})
 
             # fill in new ids in links
-            if links.has_key(needed):
+            if needed in links:
                 for linkcn, linkid, linkprop in links[needed]:
                     props = all_props[(linkcn, linkid)]
                     cl = self.db.classes[linkcn]
                     propdef = cl.getprops()[linkprop]
-                    if not props.has_key(linkprop):
+                    if linkprop not in props:
                         if linkid is None or linkid.startswith('-'):
                             # linking to a new item
                             if isinstance(propdef, hyperdb.Multilink):
-                                props[linkprop] = [newid]
+                                props[linkprop] = [nodeid]
                             else:
-                                props[linkprop] = newid
+                                props[linkprop] = nodeid
                         else:
                             # linking to an existing item
                             if isinstance(propdef, hyperdb.Multilink):
@@ -488,7 +494,7 @@
                                 existing.append(nodeid)
                                 props[linkprop] = existing
                             else:
-                                props[linkprop] = newid
+                                props[linkprop] = nodeid
 
         return '<br>'.join(m)
 
@@ -496,9 +502,9 @@
         """Change the node based on the contents of the form."""
         # check for permission
         if not self.editItemPermission(props, classname=cn, itemid=nodeid):
-            raise exceptions.Unauthorised, self._(
+            raise exceptions.Unauthorised(self._(
                 'You do not have permission to edit %(class)s'
-            ) % {'class': cn}
+            ) % {'class': cn})
 
         # make the changes
         cl = self.db.classes[cn]
@@ -508,9 +514,9 @@
         """Create a node based on the contents of the form."""
         # check for permission
         if not self.newItemPermission(props, classname=cn):
-            raise exceptions.Unauthorised, self._(
+            raise exceptions.Unauthorised(self._(
                 'You do not have permission to create %(class)s'
-            ) % {'class': cn}
+            ) % {'class': cn})
 
         # create the node and return its id
         cl = self.db.classes[cn]
@@ -551,24 +557,19 @@
         if not self.hasPermission('Create', classname=classname):
             return 0
 
-        # Check Edit permission for each property, to avoid being able
+        # Check Create permission for each property, to avoid being able
         # to set restricted ones on new item creation
         for key in props:
-            if not self.hasPermission('Edit', classname=classname,
+            if not self.hasPermission('Create', classname=classname,
                                       property=key):
-                # We restrict by default and special-case allowed properties
-                if key == 'date' or key == 'content':
-                    continue
-                elif key == 'author' and props[key] == self.userid:
-                    continue
                 return 0
         return 1
 
 class EditItemAction(EditCommon):
     def lastUserActivity(self):
-        if self.form.has_key(':lastactivity'):
+        if ':lastactivity' in self.form:
             d = date.Date(self.form[':lastactivity'].value)
-        elif self.form.has_key('@lastactivity'):
+        elif '@lastactivity' in self.form:
             d = date.Date(self.form['@lastactivity'].value)
         else:
             return None
@@ -588,7 +589,7 @@
             props, links = self.client.parsePropsFromForm()
             key = (self.classname, self.nodeid)
             # we really only collide for direct prop edit conflicts
-            return props[key].keys()
+            return list(props[key])
         else:
             return []
 
@@ -638,12 +639,12 @@
         # we will want to include index-page args in this URL too
         if self.nodeid is not None:
             url += self.nodeid
-        url += '?@ok_message=%s&@template=%s'%(urllib.quote(message),
-            urllib.quote(self.template))
+        url += '?@ok_message=%s&@template=%s'%(urllib_.quote(message),
+            urllib_.quote(self.template))
         if self.nodeid is None:
             req = templating.HTMLRequest(self.client)
             url += '&' + req.indexargs_url('', {})[1:]
-        raise exceptions.Redirect, url
+        raise exceptions.Redirect(url)
 
 class NewItemAction(EditCommon):
     def handle(self):
@@ -678,9 +679,9 @@
         self.db.commit()
 
         # redirect to the new item's page
-        raise exceptions.Redirect, '%s%s%s?@ok_message=%s&@template=%s' % (
-            self.base, self.classname, self.nodeid, urllib.quote(messages),
-            urllib.quote(self.template))
+        raise exceptions.Redirect('%s%s%s?@ok_message=%s&@template=%s' % (
+            self.base, self.classname, self.nodeid, urllib_.quote(messages),
+            urllib_.quote(self.template)))
 
 class PassResetAction(Action):
     def handle(self):
@@ -691,7 +692,7 @@
 
         """
         otks = self.db.getOTKManager()
-        if self.form.has_key('otk'):
+        if 'otk' in self.form:
             # pull the rego information out of the otk database
             otk = self.form['otk'].value
             uid = otks.get(otk, 'uid', default=None)
@@ -715,7 +716,7 @@
             # XXX we need to make the "default" page be able to display errors!
             try:
                 # set the password
-                cl.set(uid, password=password.Password(newpw))
+                cl.set(uid, password=password.Password(newpw, config=self.db.config))
                 # clear the props from the otk database
                 otks.destroy(otk)
                 self.db.commit()
@@ -743,7 +744,7 @@
             return
 
         # no OTK, so now figure the user
-        if self.form.has_key('username'):
+        if 'username' in self.form:
             name = self.form['username'].value
             try:
                 uid = self.db.user.lookup(name)
@@ -751,7 +752,7 @@
                 self.client.error_message.append(self._('Unknown username'))
                 return
             address = self.db.user.get(uid, 'address')
-        elif self.form.has_key('address'):
+        elif 'address' in self.form:
             address = self.form['address'].value
             uid = uidFromAddress(self.db, ('', address), create=0)
             if not uid:
@@ -802,7 +803,7 @@
         # nice message
         message = self._('You are now registered, welcome!')
         url = '%suser%s?@ok_message=%s'%(self.base, self.userid,
-            urllib.quote(message))
+            urllib_.quote(message))
 
         # redirect to the user's page (but not 302, as some email clients seem
         # to want to reload the page, or something)
@@ -845,12 +846,6 @@
                 % str(message))
             return
 
-        # registration isn't allowed to supply roles
-        user_props = props[('user', None)]
-        if user_props.has_key('roles'):
-            raise exceptions.Unauthorised, self._(
-                "It is not permitted to supply roles at registration.")
-
         # skip the confirmation step?
         if self.db.config['INSTANT_REGISTRATION']:
             # handle the create now
@@ -875,7 +870,8 @@
             return self.finishRego()
 
         # generate the one-time-key and store the props for later
-        for propname, proptype in self.db.user.getprops().items():
+        user_props = props[('user', None)]
+        for propname, proptype in self.db.user.getprops().iteritems():
             value = user_props.get(propname, None)
             if value is None:
                 pass
@@ -926,7 +922,18 @@
         self.db.commit()
 
         # redirect to the "you're almost there" page
-        raise exceptions.Redirect, '%suser?@template=rego_progress'%self.base
+        raise exceptions.Redirect('%suser?@template=rego_progress'%self.base)
+
+    def newItemPermission(self, props, classname=None):
+        """Just check the "Register" permission.
+        """
+        # registration isn't allowed to supply roles
+        if 'roles' in props:
+            raise exceptions.Unauthorised(self._(
+                "It is not permitted to supply roles at registration."))
+
+        # technically already checked, but here for clarity
+        return self.hasPermission('Register', classname=classname)
 
 class LogoutAction(Action):
     def handle(self):
@@ -956,13 +963,13 @@
             raise roundup.exceptions.Reject(self._('Invalid request'))
 
         # we need the username at a minimum
-        if not self.form.has_key('__login_name'):
+        if '__login_name' not in self.form:
             self.client.error_message.append(self._('Username required'))
             return
 
         # get the login info
         self.client.user = self.form['__login_name'].value
-        if self.form.has_key('__login_password'):
+        if '__login_password' in self.form:
             password = self.form['__login_password'].value
         else:
             password = ''
@@ -979,36 +986,43 @@
 
         # save user in session
         self.client.session_api.set(user=self.client.user)
-        if self.form.has_key('remember'):
+        if 'remember' in self.form:
             self.client.session_api.update(set_cookie=True, expire=24*3600*365)
 
         # If we came from someplace, go back there
-        if self.form.has_key('__came_from'):
-            raise exceptions.Redirect, self.form['__came_from'].value
+        if '__came_from' in self.form:
+            raise exceptions.Redirect(self.form['__came_from'].value)
 
     def verifyLogin(self, username, password):
         # make sure the user exists
         try:
             self.client.userid = self.db.user.lookup(username)
         except KeyError:
-            raise exceptions.LoginError, self._('Invalid login')
+            raise exceptions.LoginError(self._('Invalid login'))
 
         # verify the password
         if not self.verifyPassword(self.client.userid, password):
-            raise exceptions.LoginError, self._('Invalid login')
+            raise exceptions.LoginError(self._('Invalid login'))
 
         # Determine whether the user has permission to log in.
         # Base behaviour is to check the user has "Web Access".
         if not self.hasPermission("Web Access"):
-            raise exceptions.LoginError, self._(
-                "You do not have permission to login")
+            raise exceptions.LoginError(self._(
+                "You do not have permission to login"))
 
-    def verifyPassword(self, userid, password):
-        '''Verify the password that the user has supplied'''
-        stored = self.db.user.get(userid, 'password')
-        if password == stored:
+    def verifyPassword(self, userid, givenpw):
+        '''Verify the password that the user has supplied.
+           Optionally migrate to new password scheme if configured
+        '''
+        db = self.db
+        stored = db.user.get(userid, 'password')
+        if givenpw == stored:
+            if db.config.WEB_MIGRATE_PASSWORDS and stored.needs_migration():
+                newpw = password.Password(givenpw, config=db.config)
+                db.user.set(userid, password=newpw)
+                db.commit()
             return 1
-        if not password and not stored:
+        if not givenpw and not stored:
             return 1
         return 0
 
@@ -1067,9 +1081,9 @@
                 # check permission to view this property on this item
                 if not self.hasPermission('View', itemid=itemid,
                         classname=request.classname, property=name):
-                    raise exceptions.Unauthorised, self._(
+                    raise exceptions.Unauthorised(self._(
                         'You do not have permission to view %(class)s'
-                    ) % {'class': request.classname}
+                    ) % {'class': request.classname})
                 row.append(str(klass.get(itemid, name)))
             self.client._socket_op(writer.writerow, row)
 
@@ -1102,7 +1116,7 @@
 
     def execute_cgi(self):
         args = {}
-        for key in self.form.keys():
+        for key in self.form:
             args[key] = self.form.getvalue(key)
         self.permission(args)
         return self.handle(args)

Modified: tracker/roundup-src/roundup/cgi/cgitb.py
==============================================================================
--- tracker/roundup-src/roundup/cgi/cgitb.py	(original)
+++ tracker/roundup-src/roundup/cgi/cgitb.py	Thu Aug  4 15:46:52 2011
@@ -37,9 +37,7 @@
 
 def niceDict(indent, dict):
     l = []
-    keys = dict.keys()
-    keys.sort()
-    for k in keys:
+    for k in sorted(dict):
         v = dict[k]
         l.append('<tr><td><strong>%s</strong></td><td>%s</td></tr>'%(k,
             cgi.escape(repr(v))))
@@ -59,7 +57,7 @@
     t.reverse()
     for frame, file, lnum, func, lines, index in t:
         args, varargs, varkw, locals = inspect.getargvalues(frame)
-        if locals.has_key('__traceback_info__'):
+        if '__traceback_info__' in locals:
             ti = locals['__traceback_info__']
             if isinstance(ti, TraversalError):
                 s = []
@@ -72,7 +70,7 @@
                 ) % {'name': ti.name, 'path': s})
             else:
                 l.append(_('<li>In %s</li>') % esc(str(ti)))
-        if locals.has_key('__traceback_supplement__'):
+        if '__traceback_supplement__' in locals:
             ts = locals['__traceback_supplement__']
             if len(ts) == 2:
                 supp, context = ts
@@ -111,8 +109,8 @@
 
 def html(context=5, i18n=None):
     _ = get_translator(i18n)
-    etype, evalue = sys.exc_type, sys.exc_value
-    if type(etype) is types.ClassType:
+    etype, evalue = sys.exc_info()[0], sys.exc_info()[1]
+    if type(etype) is type:
         etype = etype.__name__
     pyver = 'Python ' + string.split(sys.version)[0] + '<br>' + sys.executable
     head = pydoc.html.heading(
@@ -169,13 +167,13 @@
         lvals = []
         for name in names:
             if name in frame.f_code.co_varnames:
-                if locals.has_key(name):
+                if name in locals:
                     value = pydoc.html.repr(locals[name])
                 else:
                     value = _('<em>undefined</em>')
                 name = '<strong>%s</strong>' % name
             else:
-                if frame.f_globals.has_key(name):
+                if name in frame.f_globals:
                     value = pydoc.html.repr(frame.f_globals[name])
                 else:
                     value = _('<em>undefined</em>')

Modified: tracker/roundup-src/roundup/cgi/client.py
==============================================================================
--- tracker/roundup-src/roundup/cgi/client.py	(original)
+++ tracker/roundup-src/roundup/cgi/client.py	Thu Aug  4 15:46:52 2011
@@ -2,11 +2,9 @@
 """
 __docformat__ = 'restructuredtext'
 
-import base64, binascii, cgi, codecs, httplib, mimetypes, os
-import quopri, random, re, rfc822, stat, sys, time, urllib, urlparse
-import Cookie, socket, errno
-from Cookie import CookieError, BaseCookie, SimpleCookie
-from cStringIO import StringIO
+import base64, binascii, cgi, codecs, mimetypes, os
+import quopri, random, re, rfc822, stat, sys, time
+import socket, errno
 
 from roundup import roundupdb, date, hyperdb, password
 from roundup.cgi import templating, cgitb, TranslationService
@@ -18,6 +16,12 @@
 from roundup.cgi import accept_language
 from roundup import xmlrpc
 
+from roundup.anypy.cookie_ import CookieError, BaseCookie, SimpleCookie, \
+    get_cookie_date
+from roundup.anypy.io_ import StringIO
+from roundup.anypy import http_
+from roundup.anypy import urllib_
+
 def initialiseSecurity(security):
     '''Create some Permissions and Roles on the security object
 
@@ -43,7 +47,7 @@
 def clean_message_callback(match, ok={'a':1,'i':1,'b':1,'br':1}):
     """ Strip all non <a>,<i>,<b> and <br> tags from a string
     """
-    if ok.has_key(match.group(3).lower()):
+    if match.group(3).lower() in ok:
         return match.group(1)
     return '&lt;%s&gt;'%match.group(2)
 
@@ -293,14 +297,14 @@
 
         # this is the "cookie path" for this tracker (ie. the path part of
         # the "base" url)
-        self.cookie_path = urlparse.urlparse(self.base)[2]
+        self.cookie_path = urllib_.urlparse(self.base)[2]
         # cookies to set in http response
         # {(path, name): (value, expire)}
         self._cookies = {}
 
         # see if we need to re-parse the environment for the form (eg Zope)
         if form is None:
-            self.form = cgi.FieldStorage(environ=env)
+            self.form = cgi.FieldStorage(fp=request.rfile, environ=env)
         else:
             self.form = form
 
@@ -380,6 +384,7 @@
         self.determine_language()
         # Open the database as the correct user.
         self.determine_user()
+        self.check_anonymous_access()
 
         # Call the appropriate XML-RPC method.
         handler = xmlrpc.RoundupDispatcher(self.db,
@@ -437,6 +442,11 @@
                 # figure out the context and desired content template
                 self.determine_context()
 
+                # if we've made it this far the context is to a bit of
+                # Roundup's real web interface (not a file being served up)
+                # so do the Anonymous Web Access check now
+                self.check_anonymous_access()
+
                 # possibly handle a form submit action (may change self.classname
                 # and self.template, and may also append error/ok_messages)
                 html = self.handle_action()
@@ -495,12 +505,12 @@
             # authorization, send back a response that will cause the
             # browser to prompt the user again.
             if self.instance.config.WEB_HTTP_AUTH:
-                self.response_code = httplib.UNAUTHORIZED
+                self.response_code = http_.client.UNAUTHORIZED
                 realm = self.instance.config.TRACKER_NAME
                 self.setHeader("WWW-Authenticate",
                                "Basic realm=\"%s\"" % realm)
             else:
-                self.response_code = httplib.FORBIDDEN
+                self.response_code = http_.client.FORBIDDEN
             self.renderFrontPage(message)
         except Unauthorised, message:
             # users may always see the front page
@@ -520,15 +530,15 @@
                 # we can't map the URL to a class we know about
                 # reraise the NotFound and let roundup_server
                 # handle it
-                raise NotFound, e
+                raise NotFound(e)
         except FormError, e:
             self.error_message.append(self._('Form Error: ') + str(e))
             self.write_html(self.renderContext())
         except:
             # Something has gone badly wrong.  Therefore, we should
             # make sure that the response code indicates failure.
-            if self.response_code == httplib.OK:
-                self.response_code = httplib.INTERNAL_SERVER_ERROR
+            if self.response_code == http_.client.OK:
+                self.response_code = http_.client.INTERNAL_SERVER_ERROR
             # Help the administrator work out what went wrong.
             html = ("<h1>Traceback</h1>"
                     + cgitb.html(i18n=self.translator)
@@ -611,12 +621,12 @@
         """
         # look for client charset
         charset_parameter = 0
-        if self.form.has_key('@charset'):
+        if '@charset' in self.form:
             charset = self.form['@charset'].value
             if charset.lower() == "none":
                 charset = ""
             charset_parameter = 1
-        elif self.cookie.has_key('roundup_charset'):
+        elif 'roundup_charset' in self.cookie:
             charset = self.cookie['roundup_charset'].value
         else:
             charset = None
@@ -653,7 +663,7 @@
                     uc = int(num)
                 return unichr(uc)
 
-            for field_name in self.form.keys():
+            for field_name in self.form:
                 field = self.form[field_name]
                 if (field.type == 'text/plain') and not field.filename:
                     try:
@@ -668,12 +678,12 @@
         # look for language parameter
         # then for language cookie
         # last for the Accept-Language header
-        if self.form.has_key("@language"):
+        if "@language" in self.form:
             language = self.form["@language"].value
             if language.lower() == "none":
                 language = ""
             self.add_cookie("roundup_language", language)
-        elif self.cookie.has_key("roundup_language"):
+        elif "roundup_language" in self.cookie:
             language = self.cookie["roundup_language"].value
         elif self.instance.config["WEB_USE_BROWSER_LANGUAGE"]:
             hal = self.env.get('HTTP_ACCEPT_LANGUAGE')
@@ -701,7 +711,7 @@
         user = None
         # first up, try http authorization if enabled
         if self.instance.config['WEB_HTTP_AUTH']:
-            if self.env.has_key('REMOTE_USER'):
+            if 'REMOTE_USER' in self.env:
                 # we have external auth (e.g. by Apache)
                 user = self.env['REMOTE_USER']
             elif self.env.get('HTTP_AUTHORIZATION', ''):
@@ -745,15 +755,44 @@
         # make sure the anonymous user is valid if we're using it
         if user == 'anonymous':
             self.make_user_anonymous()
-            if not self.db.security.hasPermission('Web Access', self.userid):
-                raise Unauthorised, self._("Anonymous users are not "
-                    "allowed to use the web interface")
         else:
             self.user = user
 
         # reopen the database as the correct user
         self.opendb(self.user)
 
+    def check_anonymous_access(self):
+        """Check that the Anonymous user is actually allowed to use the web
+        interface and short-circuit all further processing if they're not.
+        """
+        # allow Anonymous to use the "login" and "register" actions (noting
+        # that "register" has its own "Register" permission check)
+
+        if ':action' in self.form:
+            action = self.form[':action']
+        elif '@action' in self.form:
+            action = self.form['@action']
+        else:
+            action = ''
+        if isinstance(action, list):
+            raise SeriousError('broken form: multiple @action values submitted')
+        elif action != '':
+            action = action.value.lower()
+        if action in ('login', 'register'):
+            return
+
+        # allow Anonymous to view the "user" "register" template if they're
+        # allowed to register
+        if (self.db.security.hasPermission('Register', self.userid, 'user')
+                and self.classname == 'user' and self.template == 'register'):
+            return
+
+        # otherwise for everything else
+        if self.user == 'anonymous':
+            if not self.db.security.hasPermission('Web Access', self.userid):
+                raise Unauthorised(self._("Anonymous users are not "
+                    "allowed to use the web interface"))
+
     def opendb(self, username):
         """Open the database and set the current user.
 
@@ -826,7 +865,7 @@
 
         # see if a template or messages are specified
         template_override = ok_message = error_message = None
-        for key in self.form.keys():
+        for key in self.form:
             if self.FV_TEMPLATE.match(key):
                 template_override = self.form[key].value
             elif self.FV_OK_MESSAGE.match(key):
@@ -851,12 +890,12 @@
                 self.template = ''
             return
         elif path[0] in ('_file', '@@file'):
-            raise SendStaticFile, os.path.join(*path[1:])
+            raise SendStaticFile(os.path.join(*path[1:]))
         else:
             self.classname = path[0]
             if len(path) > 1:
                 # send the file identified by the designator in path[0]
-                raise SendFile, path[0]
+                raise SendFile(path[0])
 
         # see if we got a designator
         m = dre.match(self.classname)
@@ -866,13 +905,13 @@
             try:
                 klass = self.db.getclass(self.classname)
             except KeyError:
-                raise NotFound, '%s/%s'%(self.classname, self.nodeid)
+                raise NotFound('%s/%s'%(self.classname, self.nodeid))
             if long(self.nodeid) > 2**31:
                 # Postgres will complain with a ProgrammingError
                 # if we try to pass in numbers that are too large
-                raise NotFound, '%s/%s'%(self.classname, self.nodeid)
+                raise NotFound('%s/%s'%(self.classname, self.nodeid))
             if not klass.hasnode(self.nodeid):
-                raise NotFound, '%s/%s'%(self.classname, self.nodeid)
+                raise NotFound('%s/%s'%(self.classname, self.nodeid))
             # with a designator, we default to item view
             self.template = 'item'
         else:
@@ -883,7 +922,7 @@
         try:
             self.db.getclass(self.classname)
         except KeyError:
-            raise NotFound, self.classname
+            raise NotFound(self.classname)
 
         # see if we have a template override
         if template_override is not None:
@@ -894,34 +933,39 @@
         """
         m = dre.match(str(designator))
         if not m:
-            raise NotFound, str(designator)
+            raise NotFound(str(designator))
         classname, nodeid = m.group(1), m.group(2)
 
         try:
             klass = self.db.getclass(classname)
         except KeyError:
             # The classname was not valid.
-            raise NotFound, str(designator)
-            
+            raise NotFound(str(designator))
+
+        # perform the Anonymous user access check
+        self.check_anonymous_access()
 
         # make sure we have the appropriate properties
         props = klass.getprops()
-        if not props.has_key('type'):
-            raise NotFound, designator
-        if not props.has_key('content'):
-            raise NotFound, designator
+        if 'type' not in props:
+            raise NotFound(designator)
+        if 'content' not in props:
+            raise NotFound(designator)
 
         # make sure we have permission
         if not self.db.security.hasPermission('View', self.userid,
                 classname, 'content', nodeid):
-            raise Unauthorised, self._("You are not allowed to view "
-                "this file.")
+            raise Unauthorised(self._("You are not allowed to view "
+                "this file."))
 
-        # MvL 20100404: catch IndexError
+        # MvL 20100404: catch IndexError (issue #326)
         try:
             mime_type = klass.get(nodeid, 'type')
         except IndexError:
-            raise NotFound, designator
+            raise NotFound(designator)
+        # Can happen for msg class:
+        if not mime_type:
+            mime_type = 'text/plain'
 
         # if the mime_type is HTML-ish then make sure we're allowed to serve up
         # HTML-ish content
@@ -969,7 +1013,7 @@
             if os.path.isfile(filename) and filename.startswith(prefix):
                 break
         else:
-            raise NotFound, file
+            raise NotFound(file)
 
         # last-modified time
         lmt = os.stat(filename)[stat.ST_MTIME]
@@ -998,7 +1042,7 @@
         # XXX see which interfaces set this
         #if hasattr(self.request, 'headers'):
             #ims = self.request.headers.getheader('if-modified-since')
-        if self.env.has_key('HTTP_IF_MODIFIED_SINCE'):
+        if 'HTTP_IF_MODIFIED_SINCE' in self.env:
             # cgi will put the header in the env var
             ims = self.env['HTTP_IF_MODIFIED_SINCE']
         if ims:
@@ -1022,7 +1066,7 @@
         message['Content-type'] = 'text/html; charset=utf-8'
         message.set_payload(content)
         encode_quopri(message)
-        self.mailer.smtp_send(to, str(message))
+        self.mailer.smtp_send(to, message.as_string())
     
     def renderFrontPage(self, message):
         """Return the front page of the tracker."""
@@ -1067,9 +1111,9 @@
                 result = result.replace('</body>', s)
             return result
         except templating.NoTemplate, message:
-            return '<strong>%s</strong>'%message
+            return '<strong>%s</strong>'%cgi.escape(str(message))
         except templating.Unauthorised, message:
-            raise Unauthorised, str(message)
+            raise Unauthorised(cgi.escape(str(message)))
         except:
             # everything else
             if self.instance.config.WEB_DEBUG:
@@ -1087,7 +1131,7 @@
                 # receive an error message, and the administrator will
                 # receive a traceback, albeit with less information
                 # than the one we tried to generate above.
-                raise exc_info[0], exc_info[1], exc_info[2]
+                raise exc_info[0](exc_info[1]).with_traceback(exc_info[2])
 
     # these are the actions that are available
     actions = (
@@ -1117,13 +1161,18 @@
             We explicitly catch Reject and ValueError exceptions and
             present their messages to the user.
         """
-        if self.form.has_key(':action'):
-            action = self.form[':action'].value.lower()
-        elif self.form.has_key('@action'):
-            action = self.form['@action'].value.lower()
+        if ':action' in self.form:
+            action = self.form[':action']
+        elif '@action' in self.form:
+            action = self.form['@action']
         else:
             return None
 
+        if isinstance(action, list):
+            raise SeriousError('broken form: multiple @action values submitted')
+        else:
+            action = action.value.lower()
+
         try:
             action_klass = self.get_action_class(action)
 
@@ -1139,7 +1188,7 @@
 
     def get_action_class(self, action_name):
         if (hasattr(self.instance, 'cgi_actions') and
-                self.instance.cgi_actions.has_key(action_name)):
+                action_name in self.instance.cgi_actions):
             # tracker-defined action
             action_klass = self.instance.cgi_actions[action_name]
         else:
@@ -1148,7 +1197,7 @@
                 if name == action_name:
                     break
             else:
-                raise ValueError, 'No such action "%s"'%action_name
+                raise ValueError('No such action "%s"'%action_name)
         return action_klass
 
     def _socket_op(self, call, *args, **kwargs):
@@ -1188,7 +1237,7 @@
     def write_html(self, content):
         if not self.headers_done:
             # at this point, we are sure about Content-Type
-            if not self.additional_headers.has_key('Content-Type'):
+            if 'Content-Type' not in self.additional_headers:
                 self.additional_headers['Content-Type'] = \
                     'text/html; charset=%s' % self.charset
             self.header()
@@ -1350,14 +1399,14 @@
                 return None
             # Return code 416 with a Content-Range header giving the
             # allowable range.
-            self.response_code = httplib.REQUESTED_RANGE_NOT_SATISFIABLE
+            self.response_code = http_.client.REQUESTED_RANGE_NOT_SATISFIABLE
             self.setHeader("Content-Range", "bytes */%d" % length)
             return None
         # RFC 2616 10.2.7: 206 Partial Content
         #
         # Tell the client that we are honoring the Range request by
         # indicating that we are providing partial content.
-        self.response_code = httplib.PARTIAL_CONTENT
+        self.response_code = http_.client.PARTIAL_CONTENT
         # RFC 2616 14.16: Content-Range
         #
         # Tell the client what data we are providing.
@@ -1411,7 +1460,7 @@
         # If the client doesn't actually want the body, or if we are
         # indicating an invalid range.
         if (self.env['REQUEST_METHOD'] == 'HEAD'
-            or self.response_code == httplib.REQUESTED_RANGE_NOT_SATISFIABLE):
+            or self.response_code == http_.client.REQUESTED_RANGE_NOT_SATISFIABLE):
             return
         # Use the optimized "sendfile" operation, if possible.
         if hasattr(self.request, "sendfile"):
@@ -1446,12 +1495,12 @@
         if headers.get('Content-Type', 'text/html') == 'text/html':
             headers['Content-Type'] = 'text/html; charset=utf-8'
 
-        headers = headers.items()
+        headers = list(headers.items())
 
-        for ((path, name), (value, expire)) in self._cookies.items():
+        for ((path, name), (value, expire)) in self._cookies.iteritems():
             cookie = "%s=%s; Path=%s;"%(name, value, path)
             if expire is not None:
-                cookie += " expires=%s;"%Cookie._getdate(expire)
+                cookie += " expires=%s;"%get_cookie_date(expire)
             headers.append(('Set-Cookie', cookie))
 
         self._socket_op(self.request.start_response, headers, response)

Modified: tracker/roundup-src/roundup/cgi/form_parser.py
==============================================================================
--- tracker/roundup-src/roundup/cgi/form_parser.py	(original)
+++ tracker/roundup-src/roundup/cgi/form_parser.py	Thu Aug  4 15:46:52 2011
@@ -383,7 +383,7 @@
                     raise FormError, self._('Password and confirmation text '
                         'do not match')
                 try:
-                    value = password.Password(value)
+                    value = password.Password(value, config=self.db.config)
                 except hyperdb.HyperdbValueError, msg:
                     raise FormError, msg
 

Modified: tracker/roundup-src/roundup/cgi/templating.py
==============================================================================
--- tracker/roundup-src/roundup/cgi/templating.py	(original)
+++ tracker/roundup-src/roundup/cgi/templating.py	Thu Aug  4 15:46:52 2011
@@ -27,6 +27,8 @@
 from roundup import i18n
 from roundup.i18n import _
 
+from KeywordsExpr import render_keywords_expression_editor
+
 try:
     import cPickle as pickle
 except ImportError:
@@ -115,9 +117,9 @@
     if os.path.exists(src):
         return (src, generic)
 
-    raise NoTemplate, 'No template file exists for templating "%s" '\
+    raise NoTemplate('No template file exists for templating "%s" '
         'with template "%s" (neither "%s" nor "%s")'%(name, view,
-        filename, generic)
+        filename, generic))
 
 class Templates:
     templates = {}
@@ -520,20 +522,23 @@
     def is_edit_ok(self):
         """ Is the user allowed to Create the current class?
         """
-        return self._db.security.hasPermission('Create', self._client.userid,
-            self._classname)
+        perm = self._db.security.hasPermission
+        return perm('Web Access', self._client.userid) and perm('Create',
+            self._client.userid, self._classname)
 
     def is_retire_ok(self):
         """ Is the user allowed to retire items of the current class?
         """
-        return self._db.security.hasPermission('Retire', self._client.userid,
-            self._classname)
+        perm = self._db.security.hasPermission
+        return perm('Web Access', self._client.userid) and perm('Retire',
+            self._client.userid, self._classname)
 
     def is_view_ok(self):
         """ Is the user allowed to View the current class?
         """
-        return self._db.security.hasPermission('View', self._client.userid,
-            self._classname)
+        perm = self._db.security.hasPermission
+        return perm('Web Access', self._client.userid) and perm('View',
+            self._client.userid, self._classname)
 
     def is_only_view_ok(self):
         """ Is the user only allowed to View (ie. not Create) the current class?
@@ -562,10 +567,7 @@
         for klass, htmlklass in propclasses:
             if not isinstance(prop, klass):
                 continue
-            if isinstance(prop, hyperdb.Multilink):
-                value = []
-            else:
-                value = None
+            value = prop.get_default_value()
             return htmlklass(self._client, self._classname, None, prop, item,
                 value, self._anonymous)
 
@@ -598,13 +600,10 @@
         l = []
         for name, prop in self._props.items():
             for klass, htmlklass in propclasses:
-                if isinstance(prop, hyperdb.Multilink):
-                    value = []
-                else:
-                    value = None
                 if isinstance(prop, klass):
+                    value = prop.get_default_value()
                     l.append(htmlklass(self._client, self._classname, '',
-                        prop, name, value, self._anonymous))
+                                       prop, name, value, self._anonymous))
         if sort:
             l.sort(lambda a,b:cmp(a._name, b._name))
         return l
@@ -620,6 +619,8 @@
         # check perms
         check = self._client.db.security.hasPermission
         userid = self._client.userid
+        if not check('Web Access', userid):
+            return []
 
         l = [HTMLItem(self._client, self._classname, id) for id in l
             if check('View', userid, self._classname, itemid=id)]
@@ -634,11 +635,14 @@
         writer = csv.writer(s)
         writer.writerow(props)
         check = self._client.db.security.hasPermission
+        userid = self._client.userid
+        if not check('Web Access', userid):
+            return ''
         for nodeid in self._klass.list():
             l = []
             for name in props:
                 # check permission to view this property on this item
-                if not check('View', self._client.userid, itemid=nodeid,
+                if not check('View', userid, itemid=nodeid,
                         classname=self._klass.classname, property=name):
                     raise Unauthorised('view', self._klass.classname,
                         translator=self._client.translator)
@@ -665,13 +669,23 @@
 
             "request" takes precedence over the other three arguments.
         """
+        security = self._db.security
+        userid = self._client.userid
         if request is not None:
+            # for a request we assume it has already been
+            # security-filtered
             filterspec = request.filterspec
             sort = request.sort
             group = request.group
+        else:
+            cn = self.classname
+            filterspec = security.filterFilterspec(userid, cn, filterspec)
+            sort = security.filterSortspec(userid, cn, sort)
+            group = security.filterSortspec(userid, cn, group)
 
-        check = self._db.security.hasPermission
-        userid = self._client.userid
+        check = security.hasPermission
+        if not check('Web Access', userid):
+            return []
 
         l = [HTMLItem(self._client, self.classname, id)
              for id in self._klass.filter(None, filterspec, sort, group)
@@ -801,20 +815,23 @@
     def is_edit_ok(self):
         """ Is the user allowed to Edit this item?
         """
-        return self._db.security.hasPermission('Edit', self._client.userid,
-            self._classname, itemid=self._nodeid)
+        perm = self._db.security.hasPermission
+        return perm('Web Access', self._client.userid) and perm('Edit',
+            self._client.userid, self._classname, itemid=self._nodeid)
 
     def is_retire_ok(self):
         """ Is the user allowed to Reture this item?
         """
-        return self._db.security.hasPermission('Retire', self._client.userid,
-            self._classname, itemid=self._nodeid)
+        perm = self._db.security.hasPermission
+        return perm('Web Access', self._client.userid) and perm('Retire',
+            self._client.userid, self._classname, itemid=self._nodeid)
 
     def is_view_ok(self):
         """ Is the user allowed to View this item?
         """
-        if self._db.security.hasPermission('View', self._client.userid,
-                self._classname, itemid=self._nodeid):
+        perm = self._db.security.hasPermission
+        if perm('Web Access', self._client.userid) and perm('View',
+                self._client.userid, self._classname, itemid=self._nodeid):
             return 1
         return self.is_edit_ok()
 
@@ -1091,6 +1108,13 @@
                             cell[-1] += ' -> %s'%current[k]
                             current[k] = val
 
+                    elif isinstance(prop, hyperdb.Password) and args[k] is not None:
+                        val = args[k].dummystr()
+                        cell.append('%s: %s'%(self._(k), val))
+                        if current.has_key(k):
+                            cell[-1] += ' -> %s'%current[k]
+                            current[k] = val
+
                     elif not args[k]:
                         if current.has_key(k):
                             cell.append('%s: %s'%(self._(k), current[k]))
@@ -1205,12 +1229,9 @@
         return self._db.security.hasPermission(permission,
             self._nodeid, classname, property, itemid)
 
-    def hasRole(self, rolename):
-        """Determine whether the user has the Role."""
-        roles = self._db.user.get(self._nodeid, 'roles').split(',')
-        for role in roles:
-            if role.strip() == rolename: return True
-        return False
+    def hasRole(self, *rolenames):
+        """Determine whether the user has any role in rolenames."""
+        return self._db.user.has_role(self._nodeid, *rolenames)
 
 def HTMLItem(client, classname, nodeid, anonymous=0):
     if classname == 'user':
@@ -1240,7 +1261,12 @@
         self._anonymous = anonymous
         self._name = name
         if not anonymous:
-            self._formname = '%s%s@%s'%(classname, nodeid, name)
+            if nodeid:
+                self._formname = '%s%s@%s'%(classname, nodeid, name)
+            else:
+                # This case occurs when creating a property for a
+                # non-anonymous class.
+                self._formname = '%s@%s'%(classname, name)
         else:
             self._formname = name
 
@@ -1266,8 +1292,9 @@
         HTMLInputMixin.__init__(self)
 
     def __repr__(self):
-        return '<HTMLProperty(0x%x) %s %r %r>'%(id(self), self._formname,
-            self._prop, self._value)
+        classname = self.__class__.__name__
+        return '<%s(0x%x) %s %r %r>'%(classname, id(self), self._formname,
+                                      self._prop, self._value)
     def __str__(self):
         return self.plain()
     def __cmp__(self, other):
@@ -1287,19 +1314,22 @@
         property. Check "Create" for new items, or "Edit" for existing
         ones.
         """
+        perm = self._db.security.hasPermission
+        userid = self._client.userid
         if self._nodeid:
-            return self._db.security.hasPermission('Edit', self._client.userid,
-                self._classname, self._name, self._nodeid)
-        return self._db.security.hasPermission('Create', self._client.userid,
-            self._classname, self._name) or \
-            self._db.security.hasPermission('Register', self._client.userid,
-                                            self._classname, self._name)
+            if not perm('Web Access', userid):
+                return False
+            return perm('Edit', userid, self._classname, self._name,
+                self._nodeid)
+        return perm('Create', userid, self._classname, self._name) or \
+            perm('Register', userid, self._classname, self._name)
 
     def is_view_ok(self):
         """ Is the user allowed to View the current class?
         """
-        if self._db.security.hasPermission('View', self._client.userid,
-                self._classname, self._name, self._nodeid):
+        perm = self._db.security.hasPermission
+        if perm('Web Access',  self._client.userid) and perm('View',
+                self._client.userid, self._classname, self._name, self._nodeid):
             return 1
         return self.is_edit_ok()
 
@@ -1538,7 +1568,10 @@
 
         if self._value is None:
             return ''
-        return self._('*encrypted*')
+        value = self._value.dummystr()
+        if escape:
+            value = cgi.escape(value)
+        return value
 
     def field(self, size=30, **kwargs):
         """ Render a form edit field for the property.
@@ -2083,9 +2116,10 @@
         check = self._db.security.hasPermission
         userid = self._client.userid
         classname = self._prop.classname
-        for value in values:
-            if check('View', userid, classname, itemid=value):
-                yield HTMLItem(self._client, classname, value)
+        if check('Web Access', userid):
+            for value in values:
+                if check('View', userid, classname, itemid=value):
+                    yield HTMLItem(self._client, classname, value)
 
     def __iter__(self):
         """ iterate and return a new HTMLItem
@@ -2149,16 +2183,19 @@
             return self.plain(escape=1)
 
         linkcl = self._db.getclass(self._prop.classname)
-        value = self._value[:]
-        # map the id to the label property
-        if not linkcl.getkey():
-            showid=1
-        if not showid:
-            k = linkcl.labelprop(1)
-            value = lookupKeys(linkcl, k, value)
-        value = ','.join(value)
-        return self.input(name=self._formname, size=size, value=value,
-                          **kwargs)
+
+        if 'value' not in kwargs:
+            value = self._value[:]
+            # map the id to the label property
+            if not linkcl.getkey():
+                showid=1
+            if not showid:
+                k = linkcl.labelprop(1)
+                value = lookupKeys(linkcl, k, value)
+            value = ','.join(value)
+            kwargs["value"] = value
+
+        return self.input(name=self._formname, size=size, **kwargs)
 
     def menu(self, size=None, height=None, showid=0, additional=[],
              value=None, sort_on=None, html_kwargs = {}, **conditions):
@@ -2292,13 +2329,19 @@
 
 
 def make_sort_function(db, classname, sort_on=None):
-    """Make a sort function for a given class
-    """
+    """Make a sort function for a given class.
+
+    The list being sorted may contain mixed ids and labels.
+    """
     linkcl = db.getclass(classname)
     if sort_on is None:
         sort_on = linkcl.orderprop()
     def sortfunc(a, b):
-        return cmp(linkcl.get(a, sort_on), linkcl.get(b, sort_on))
+        if num_re.match(a):
+            a = linkcl.get(a, sort_on)
+        if num_re.match(b):
+            b = linkcl.get(b, sort_on)
+        return cmp(a, b)
     return sortfunc
 
 def handleListCGIValue(value):
@@ -2427,12 +2470,16 @@
                 self.columns = handleListCGIValue(self.form[name])
                 break
         self.show = support.TruthDict(self.columns)
+        security = self._client.db.security
+        userid = self._client.userid
 
         # sorting and grouping
         self.sort = []
         self.group = []
         self._parse_sort(self.sort, 'sort')
         self._parse_sort(self.group, 'group')
+        self.sort = security.filterSortspec(userid, self.classname, self.sort)
+        self.group = security.filterSortspec(userid, self.classname, self.group)
 
         # filtering
         self.filter = []
@@ -2462,6 +2509,8 @@
                         self.filterspec[name] = handleListCGIValue(fv)
                     else:
                         self.filterspec[name] = fv.value
+        self.filterspec = security.filterFilterspec(userid, self.classname,
+            self.filterspec)
 
         # full-text search argument
         self.search_text = None
@@ -2697,9 +2746,15 @@
             ignore[(klass, prop)] = None
         return ignore
 
-    def batch(self, to_ignore='ignore'):
+    def batch(self, permission='View', to_ignore='ignore'):
         """ Return a batch object for results from the "current search"
         """
+        check = self._client.db.security.hasPermission
+        userid = self._client.userid
+        if not check('Web Access', userid):
+            return Batch(self.client, [], self.pagesize, self.startwith,
+                classname=self.classname)
+
         filterspec = self.filterspec
         sort = self.sort
         group = self.group
@@ -2717,10 +2772,8 @@
             matches = None
 
         # filter for visibility
-        check = self._client.db.security.hasPermission
-        userid = self._client.userid
         l = [id for id in klass.filter(matches, filterspec, sort, group)
-            if check('View', userid, self.classname, itemid=id)]
+            if check(permission, userid, self.classname, itemid=id)]
 
         # return the batch object, using IDs only
         return Batch(self.client, l, self.pagesize, self.startwith,
@@ -2845,6 +2898,9 @@
             raise AttributeError, name
         return self.client.instance.templating_utils[name]
 
+    def keywords_expressions(self, request):
+        return render_keywords_expression_editor(request)
+
     def html_calendar(self, request):
         """Generate a HTML calendar.
 
@@ -2858,7 +2914,9 @@
 
         html will simply be a table.
         """
-        date_str  = request.form.getfirst("date", ".")
+        tz = request.client.db.getUserTimezone()
+        current_date = date.Date(".").local(tz)
+        date_str  = request.form.getfirst("date", current_date)
         display   = request.form.getfirst("display", date_str)
         template  = request.form.getfirst("@template", "calendar")
         form      = request.form.getfirst("form")

Modified: tracker/roundup-src/roundup/cgi/wsgi_handler.py
==============================================================================
--- tracker/roundup-src/roundup/cgi/wsgi_handler.py	(original)
+++ tracker/roundup-src/roundup/cgi/wsgi_handler.py	Thu Aug  4 15:46:52 2011
@@ -10,7 +10,7 @@
 
 import roundup.instance
 from roundup.cgi import TranslationService
-from BaseHTTPServer import BaseHTTPRequestHandler
+from BaseHTTPServer import BaseHTTPRequestHandler, DEFAULT_ERROR_MESSAGE
 
 
 class Writer(object):
@@ -43,6 +43,14 @@
         request.wfile = Writer(request)
         request.__wfile = None
 
+        if environ ['REQUEST_METHOD'] == 'OPTIONS':
+            code = 501
+            message, explain = BaseHTTPRequestHandler.responses[code]
+            request.start_response([('Content-Type', 'text/html'),
+                ('Connection', 'close')], code)
+            request.wfile.write(DEFAULT_ERROR_MESSAGE % locals())
+            return []
+
         tracker = roundup.instance.open(self.home, not self.debug)
 
         # need to strip the leading '/'
@@ -65,9 +73,9 @@
 
     def start_response(self, headers, response_code):
         """Set HTTP response code"""
-        description = BaseHTTPRequestHandler.responses[response_code]
+        message, explain = BaseHTTPRequestHandler.responses[response_code]
         self.__wfile = self.__start_response('%d %s'%(response_code,
-            description), headers)
+            message), headers)
 
     def get_wfile(self):
         if self.__wfile is None:

Modified: tracker/roundup-src/roundup/configuration.py
==============================================================================
--- tracker/roundup-src/roundup/configuration.py	(original)
+++ tracker/roundup-src/roundup/configuration.py	Thu Aug  4 15:46:52 2011
@@ -537,6 +537,22 @@
             "starting with python 2.5. Set this to a higher value if you\n"
             "get the error 'Error: field larger than field limit' during\n"
             "import."),
+        (IntegerNumberOption, 'password_pbkdf2_default_rounds', '10000',
+            "Sets the default number of rounds used when encoding passwords\n"
+            "using the PBKDF2 scheme. Set this to a higher value on faster\n"
+            "systems which want more security.\n"
+            "PBKDF2 (Password-Based Key Derivation Function) is a\n"
+            "password hashing mechanism that derives hash from the\n"
+            "password and a random salt. For authentication this process\n"
+            "is repeated with the same salt as in the stored hash.\n"
+            "If both hashes match, the authentication succeeds.\n"
+            "PBKDF2 supports a variable 'rounds' parameter which varies\n"
+            "the time-cost of calculating the hash - doubling the number\n"
+            "of rounds doubles the cpu time required to calculate it. The\n"
+            "purpose of this is to periodically adjust the rounds as CPUs\n"
+            "become faster. The currently enforced minimum number of\n"
+            "rounds is 1000.\n"
+            "See: http://en.wikipedia.org/wiki/PBKDF2 and RFC2898"),
     )),
     ("tracker", (
         (Option, "name", "Roundup issue tracker",
@@ -579,6 +595,10 @@
             "Setting this option makes Roundup display error tracebacks\n"
             "in the user's browser rather than emailing them to the\n"
             "tracker admin."),
+        (BooleanOption, "migrate_passwords", "yes",
+            "Setting this option makes Roundup migrate passwords with\n"
+            "an insecure password-scheme to a more secure scheme\n"
+            "when the user logs in via the web-interface."),
     )),
     ("rdbms", (
         (Option, 'name', 'roundup',
@@ -604,8 +624,30 @@
         (NullableOption, 'read_default_group', 'roundup',
             "Name of the group to use in the MySQL defaults file (.my.cnf).\n"
             "Only used in MySQL connections."),
+        (IntegerNumberOption, 'sqlite_timeout', '30',
+            "Number of seconds to wait when the SQLite database is locked\n"
+            "Default: use a 30 second timeout (extraordinarily generous)\n"
+            "Only used in SQLite connections."),
         (IntegerNumberOption, 'cache_size', '100',
             "Size of the node cache (in elements)"),
+        (BooleanOption, "allow_create", "yes",
+            "Setting this option to 'no' protects the database against table creations."),
+        (BooleanOption, "allow_alter", "yes",
+            "Setting this option to 'no' protects the database against table alterations."),
+        (BooleanOption, "allow_drop", "yes",
+            "Setting this option to 'no' protects the database against table drops."),
+        (NullableOption, 'template', '',
+            "Name of the PostgreSQL template for database creation.\n"
+            "For database creation the template used has to match\n"
+            "the character encoding used (UTF8), there are different\n"
+            "PostgreSQL installations using different templates with\n"
+            "different encodings. If you get an error:\n"
+            "  new encoding (UTF8) is incompatible with the encoding of\n"
+            "  the template database (SQL_ASCII)\n"
+            "  HINT:  Use the same encoding as in the template database,\n"
+            "  or use template0 as template.\n"
+            "then set this option to the template name given in the\n"
+            "error message."),
     ), "Settings in this section are used"
         " by RDBMS backends only"
     ),
@@ -725,6 +767,10 @@
             "will match an issue for the interval after the issue's\n"
             "creation or last activity. The interval is a standard\n"
             "Roundup interval."),
+        (BooleanOption, "subject_updates_title", "yes",
+            "Update issue title if incoming subject of email is different.\n"
+            "Setting this to \"no\" will ignore the title part of"
+            " the subject\nof incoming email messages.\n"),
         (RegExpOption, "refwd_re", "(\s*\W?\s*(fw|fwd|re|aw|sv|ang)\W)+",
             "Regular expression matching a single reply or forward\n"
             "prefix prepended by the mailer. This is explicitly\n"
@@ -740,6 +786,10 @@
             "Regular expression matching end of line."),
         (RegExpOption, "blankline_re", r"[\r\n]+\s*[\r\n]+",
             "Regular expression matching a blank line."),
+        (BooleanOption, "unpack_rfc822", "no",
+            "Unpack attached messages (encoded as message/rfc822 in MIME)\n"
+            "as multiple parts attached as files to the issue, if not\n"
+            "set we handle message/rfc822 attachments as a single file."),
         (BooleanOption, "ignore_alternatives", "no",
             "When parsing incoming mails, roundup uses the first\n"
             "text/plain part it finds. If this part is inside a\n"
@@ -1249,6 +1299,14 @@
         if home_dir is None:
             self.init_logging()
 
+    def copy(self):
+        new = CoreConfig()
+        new.sections = list(self.sections)
+        new.section_descriptions = dict(self.section_descriptions)
+        new.section_options = dict(self.section_options)
+        new.options = dict(self.options)
+        return new
+
     def _get_unset_options(self):
         need_set = Config._get_unset_options(self)
         # remove MAIL_PASSWORD if MAIL_USER is empty
@@ -1278,8 +1336,8 @@
             return
 
         _file = self["LOGGING_FILENAME"]
-        # set file & level on the root logger
-        logger = logging.getLogger()
+        # set file & level on the roundup logger
+        logger = logging.getLogger('roundup')
         if _file:
             hdlr = logging.FileHandler(_file)
         else:
@@ -1288,6 +1346,9 @@
             '%(asctime)s %(levelname)s %(message)s')
         hdlr.setFormatter(formatter)
         # no logging API to remove all existing handlers!?!
+        for h in logger.handlers:
+            h.close()
+            logger.removeHandler(h)
         logger.handlers = [hdlr]
         logger.setLevel(logging._levelNames[self["LOGGING_LEVEL"] or "ERROR"])
 

Modified: tracker/roundup-src/roundup/date.py
==============================================================================
--- tracker/roundup-src/roundup/date.py	(original)
+++ tracker/roundup-src/roundup/date.py	Thu Aug  4 15:46:52 2011
@@ -249,14 +249,22 @@
            serving as translation functions.
         """
         self.setTranslator(translator)
+        # Python 2.3+ datetime object
+        # common case when reading from database: avoid double-conversion
+        if isinstance(spec, datetime.datetime):
+            if offset == 0:
+                self.year, self.month, self.day, self.hour, self.minute, \
+                    self.second = spec.timetuple()[:6]
+            else:
+                TZ = get_timezone(tz)
+                self.year, self.month, self.day, self.hour, self.minute, \
+                    self.second = TZ.localize(spec).utctimetuple()[:6]
+            self.second += spec.microsecond/1000000.
+            return
+
         if type(spec) == type(''):
             self.set(spec, offset=offset, add_granularity=add_granularity)
             return
-        elif isinstance(spec, datetime.datetime):
-            # Python 2.3+ datetime object
-            y,m,d,H,M,S,x,x,x = spec.timetuple()
-            S += spec.microsecond/1000000.
-            spec = (y,m,d,H,M,S,x,x,x)
         elif hasattr(spec, 'tuple'):
             spec = spec.tuple()
         elif isinstance(spec, Date):
@@ -522,6 +530,7 @@
 
     def local(self, offset):
         """ Return this date as yyyy-mm-dd.hh:mm:ss in a local time zone.
+            The offset is a pytz tz offset if pytz is installed.
         """
         y, m, d, H, M, S = _utc_to_local(self.year, self.month, self.day,
                 self.hour, self.minute, self.second, offset)
@@ -718,14 +727,11 @@
 
     def __cmp__(self, other):
         """Compare this interval to another interval."""
+
         if other is None:
             # we are always larger than None
             return 1
-        for attr in 'sign year month day hour minute second'.split():
-            r = cmp(getattr(self, attr), getattr(other, attr))
-            if r:
-                return r
-        return 0
+        return cmp(self.as_seconds(), other.as_seconds())
 
     def __str__(self):
         """Return this interval as a string."""

Modified: tracker/roundup-src/roundup/dist/command/build.py
==============================================================================
--- tracker/roundup-src/roundup/dist/command/build.py	(original)
+++ tracker/roundup-src/roundup/dist/command/build.py	Thu Aug  4 15:46:52 2011
@@ -32,31 +32,29 @@
         manifest = [l.strip() for l in f.readlines()]
     finally:
         f.close()
-    err = [line for line in manifest if not os.path.exists(line)]
-    err.sort()
+    err = set([line for line in manifest if not os.path.exists(line)])
     # ignore auto-generated files
-    if err == ['roundup-admin', 'roundup-demo', 'roundup-gettext',
-            'roundup-mailgw', 'roundup-server']:
-        err = []
+    err = err - set(['roundup-admin', 'roundup-demo', 'roundup-gettext',
+        'roundup-mailgw', 'roundup-server', 'roundup-xmlrpc-server'])
     if err:
         n = len(manifest)
         print '\n*** SOURCE WARNING: There are files missing (%d/%d found)!'%(
             n-len(err), n)
         print 'Missing:', '\nMissing: '.join(err)
 
+def build_message_files(command):
+    """For each locale/*.po, build .mo file in target locale directory"""
+    for (_src, _dst) in list_message_files():
+        _build_dst = os.path.join("build", _dst)
+        command.mkpath(os.path.dirname(_build_dst))
+        command.announce("Compiling %s -> %s" % (_src, _build_dst))
+        msgfmt.make(_src, _build_dst)
 
-class build(base):
 
-    def build_message_files(self):
-        """For each locale/*.po, build .mo file in target locale directory"""
-        for (_src, _dst) in list_message_files():
-            _build_dst = os.path.join("build", _dst)
-            self.mkpath(os.path.dirname(_build_dst))
-            self.announce("Compiling %s -> %s" % (_src, _build_dst))
-            msgfmt.make(_src, _build_dst)
+class build(base):
 
     def run(self):
         check_manifest()
-        self.build_message_files()
+        build_message_files(self)
         base.run(self)
 

Added: tracker/roundup-src/roundup/dist/command/install_lib.py
==============================================================================
--- (empty file)
+++ tracker/roundup-src/roundup/dist/command/install_lib.py	Thu Aug  4 15:46:52 2011
@@ -0,0 +1,9 @@
+from roundup.dist.command.build import build_message_files, check_manifest
+from distutils.command.install_lib import install_lib as base
+
+class install_lib(base):
+
+    def run(self):
+        check_manifest()
+        build_message_files(self)
+        base.run(self)

Modified: tracker/roundup-src/roundup/hyperdb.py
==============================================================================
--- tracker/roundup-src/roundup/hyperdb.py	(original)
+++ tracker/roundup-src/roundup/hyperdb.py	Thu Aug  4 15:46:52 2011
@@ -35,11 +35,15 @@
 #
 class _Type(object):
     """A roundup property type."""
-    def __init__(self, required=False):
+    def __init__(self, required=False, default_value = None):
         self.required = required
+        self.__default_value = default_value
     def __repr__(self):
         ' more useful for dumps '
         return '<%s.%s>'%(self.__class__.__module__, self.__class__.__name__)
+    def get_default_value(self):
+        """The default value when creating a new instance of this property.""" 
+        return self.__default_value
     def sort_repr (self, cls, val, name):
         """Representation used for sorting. This should be a python
         built-in type, otherwise sorting will take ages. Note that
@@ -50,8 +54,8 @@
 
 class String(_Type):
     """An object designating a String property."""
-    def __init__(self, indexme='no', required=False):
-        super(String, self).__init__(required)
+    def __init__(self, indexme='no', required=False, default_value = ""):
+        super(String, self).__init__(required, default_value)
         self.indexme = indexme == 'yes'
     def from_raw(self, value, propname='', **kw):
         """fix the CRLF/CR -> LF stuff"""
@@ -72,24 +76,12 @@
     def from_raw(self, value, **kw):
         if not value:
             return None
-        m = password.Password.pwre.match(value)
-        if m:
-            # password is being given to us encrypted
-            p = password.Password()
-            p.scheme = m.group(1)
-            if p.scheme not in 'SHA crypt plaintext'.split():
-                raise HyperdbValueError, \
-                        ('property %s: unknown encryption scheme %r') %\
-                        (kw['propname'], p.scheme)
-            p.password = m.group(2)
-            value = p
-        else:
-            try:
-                value = password.Password(value)
-            except password.PasswordValueError, message:
-                raise HyperdbValueError, \
-                        _('property %s: %s')%(kw['propname'], message)
-        return value
+        try:
+            return password.Password(encrypted=value, strict=True)
+        except password.PasswordValueError, message:
+            raise HyperdbValueError, \
+                    _('property %s: %s')%(kw['propname'], message)
+
     def sort_repr (self, cls, val, name):
         if not val:
             return val
@@ -97,8 +89,9 @@
 
 class Date(_Type):
     """An object designating a Date property."""
-    def __init__(self, offset=None, required=False):
-        super(Date, self).__init__(required)
+    def __init__(self, offset=None, required=False, default_value = None):
+        super(Date, self).__init__(required = required,
+                                   default_value = default_value)
         self._offset = offset
     def offset(self, db):
         if self._offset is not None:
@@ -136,10 +129,11 @@
 class _Pointer(_Type):
     """An object designating a Pointer property that links or multilinks
     to a node in a specified class."""
-    def __init__(self, classname, do_journal='yes', required=False):
+    def __init__(self, classname, do_journal='yes', required=False,
+                 default_value = None):
         """ Default is to journal link and unlink events
         """
-        super(_Pointer, self).__init__(required)
+        super(_Pointer, self).__init__(required, default_value)
         self.classname = classname
         self.do_journal = do_journal == 'yes'
     def __repr__(self):
@@ -175,6 +169,14 @@
        "do_journal" indicates whether the linked-to nodes should have
                     'link' and 'unlink' events placed in their journal
     """
+
+    def __init__(self, classname, do_journal = 'yes', required = False):
+
+        super(Multilink, self).__init__(classname,
+                                        do_journal,
+                                        required = required,
+                                        default_value = [])        
+
     def from_raw(self, value, db, klass, propname, itemid, **kw):
         if not value:
             return []
@@ -284,18 +286,17 @@
     """ Simple tree data structure for optimizing searching of
     properties. Each node in the tree represents a roundup Class
     Property that has to be navigated for finding the given search
-    or sort properties. The sort_type attribute is used for
-    distinguishing nodes in the tree used for sorting or searching: If
-    it is 0 for a node, that node is not used for sorting. If it is 1,
-    it is used for both, sorting and searching. If it is 2 it is used
-    for sorting only.
+    or sort properties. The need_for attribute is used for
+    distinguishing nodes in the tree used for sorting, searching or
+    retrieval: The attribute is a dictionary containing one or several
+    of the values 'sort', 'search', 'retrieve'.
 
     The Proptree is also used for transitively searching attributes for
     backends that do not support transitive search (e.g. anydbm). The
     _val attribute with set_val is used for this.
     """
 
-    def __init__(self, db, cls, name, props, parent = None):
+    def __init__(self, db, cls, name, props, parent=None, retr=False):
         self.db = db
         self.name = name
         self.props = props
@@ -308,7 +309,7 @@
         self.children = []
         self.sortattr = []
         self.propdict = {}
-        self.sort_type = 0
+        self.need_for = {'search' : True}
         self.sort_direction = None
         self.sort_ids = None
         self.sort_ids_needed = False
@@ -317,6 +318,7 @@
         self.tree_sort_done = False
         self.propclass = None
         self.orderby = []
+        self.sql_idx = None # index of retrieved column in sql result
         if parent:
             self.root = parent.root
             self.depth = parent.depth + 1
@@ -324,7 +326,7 @@
             self.root = self
             self.seqno = 1
             self.depth = 0
-            self.sort_type = 1
+            self.need_for['sort'] = True
         self.id = self.root.seqno
         self.root.seqno += 1
         if self.cls:
@@ -332,15 +334,18 @@
             self.uniqname = '%s%s' % (self.cls.classname, self.id)
         if not self.parent:
             self.uniqname = self.cls.classname
+        if retr:
+            self.append_retr_props()
 
-    def append(self, name, sort_type = 0):
+    def append(self, name, need_for='search', retr=False):
         """Append a property to self.children. Will create a new
         propclass for the child.
         """
         if name in self.propdict:
             pt = self.propdict[name]
-            if sort_type and not pt.sort_type:
-                pt.sort_type = 1
+            pt.need_for[need_for] = True
+            if retr and isinstance(pt.propclass, Link):
+                pt.append_retr_props()
             return pt
         propclass = self.props[name]
         cls = None
@@ -349,15 +354,24 @@
             cls = self.db.getclass(propclass.classname)
             props = cls.getprops()
         child = self.__class__(self.db, cls, name, props, parent = self)
-        child.sort_type = sort_type
+        child.need_for = {need_for : True}
         child.propclass = propclass
         self.children.append(child)
         self.propdict[name] = child
+        if retr and isinstance(child.propclass, Link):
+            child.append_retr_props()
         return child
 
+    def append_retr_props(self):
+        """Append properties for retrieval."""
+        for name, prop in self.cls.getprops(protected=1).iteritems():
+            if isinstance(prop, Multilink):
+                continue
+            self.append(name, need_for='retrieve')
+
     def compute_sort_done(self, mlseen=False):
         """ Recursively check if attribute is needed for sorting
-        (self.sort_type > 0) or all children have tree_sort_done set and
+        ('sort' in self.need_for) or all children have tree_sort_done set and
         sort_ids_needed unset: set self.tree_sort_done if one of the conditions
         holds. Also remove sort_ids_needed recursively once having seen a
         Multilink.
@@ -371,7 +385,7 @@
             p.compute_sort_done(mlseen)
             if not p.tree_sort_done:
                 self.tree_sort_done = False
-        if not self.sort_type:
+        if 'sort' not in self.need_for:
             self.tree_sort_done = True
         if mlseen:
             self.tree_sort_done = False
@@ -389,7 +403,7 @@
         """
         filterspec = {}
         for p in self.children:
-            if p.sort_type < 2:
+            if 'search' in p.need_for:
                 if p.children:
                     p.search(sort = False)
                 filterspec[p.name] = p.val
@@ -413,7 +427,7 @@
         too.
         """
         return [p for p in self.children
-                if p.sort_type > 0 and (intermediate or p.sort_direction)]
+                if 'sort' in p.need_for and (intermediate or p.sort_direction)]
 
     def __iter__(self):
         """ Yield nodes in depth-first order -- visited nodes first """
@@ -534,7 +548,6 @@
                 curdir = sa.sort_direction
             idx += 1
         sortattr.append (val)
-        #print >> sys.stderr, "\nsortattr", sortattr
         sortattr = zip (*sortattr)
         for dir, i in reversed(zip(directions, dir_idx)):
             rev = dir == '-'
@@ -760,6 +773,16 @@
 
         """
 
+def iter_roles(roles):
+    ''' handle the text processing of turning the roles list
+        into something python can use more easily
+    '''
+    if not roles or not roles.strip():
+        raise StopIteration, "Empty roles given"
+    for role in [x.lower().strip() for x in roles.split(',')]:
+        yield role
+
+
 #
 # The base Class class
 #
@@ -928,7 +951,9 @@
         'date' is a Timestamp object specifying the time of the change and
         'tag' is the journaltag specified when the database was opened.
         """
-        raise NotImplementedError
+        if not self.do_journal:
+            raise ValueError('Journalling is disabled for this class')
+        return self.db.getjournal(self.classname, nodeid)
 
     # Locating nodes:
     def hasnode(self, nodeid):
@@ -1045,27 +1070,40 @@
         """
         raise NotImplementedError
 
-    def _proptree(self, filterspec, sortattr=[]):
+    def _proptree(self, filterspec, sortattr=[], retr=False):
         """Build a tree of all transitive properties in the given
         filterspec.
+        If we retrieve (retr is True) linked items we don't follow
+        across multilinks. We also don't follow if the searched value
+        can contain NULL values.
         """
-        proptree = Proptree(self.db, self, '', self.getprops())
+        proptree = Proptree(self.db, self, '', self.getprops(), retr=retr)
         for key, v in filterspec.iteritems():
             keys = key.split('.')
             p = proptree
+            mlseen = False
             for k in keys:
-                p = p.append(k)
+                if isinstance (p.propclass, Multilink):
+                    mlseen = True
+                isnull = v == '-1' or v is None
+                nullin = isinstance(v, type([])) and ('-1' in v or None in v)
+                r = retr and not mlseen and not isnull and not nullin
+                p = p.append(k, retr=r)
             p.val = v
         multilinks = {}
         for s in sortattr:
             keys = s[1].split('.')
             p = proptree
+            mlseen = False
             for k in keys:
-                p = p.append(k, sort_type = 2)
+                if isinstance (p.propclass, Multilink):
+                    mlseen = True
+                r = retr and not mlseen
+                p = p.append(k, need_for='sort', retr=r)
                 if isinstance (p.propclass, Multilink):
                     multilinks[p] = True
             if p.cls:
-                p = p.append(p.cls.orderprop(), sort_type = 2)
+                p = p.append(p.cls.orderprop(), need_for='sort')
             if p.sort_direction: # if an orderprop is also specified explicitly
                 continue
             p.sort_direction = s[0]
@@ -1091,7 +1129,7 @@
         for k in propname_path.split('.'):
             try:
                 prop = props[k]
-            except KeyError, TypeError:
+            except (KeyError, TypeError):
                 return default
             cl = getattr(prop, 'classname', None)
             props = None
@@ -1148,7 +1186,7 @@
         This implements a non-optimized version of Transitive search
         using _filter implemented in a backend class. A more efficient
         version can be implemented in the individual backends -- e.g.,
-        an SQL backen will want to create a single SQL statement and
+        an SQL backend will want to create a single SQL statement and
         override the filter method instead of implementing _filter.
         """
         sortattr = self._sortattr(sort = sort, group = group)
@@ -1156,6 +1194,13 @@
         proptree.search(search_matches)
         return proptree.sort()
 
+    # non-optimized filter_iter, a backend may choose to implement a
+    # better version that provides a real iterator that pre-fills the
+    # cache for each id returned. Note that the filter_iter doesn't
+    # promise to correctly sort by multilink (which isn't sane to do
+    # anyway).
+    filter_iter = filter
+
     def count(self):
         """Get the number of nodes in this class.
 
@@ -1228,6 +1273,83 @@
         propnames.sort()
         return propnames
 
+    def import_journals(self, entries):
+        """Import a class's journal.
+
+        Uses setjournal() to set the journal for each item.
+        Strategy for import: Sort first by id, then import journals for
+        each id, this way the memory footprint is a lot smaller than the
+        initial implementation which stored everything in a big hash by
+        id and then proceeded to import journals for each id."""
+        properties = self.getprops()
+        a = []
+        for l in entries:
+            # first element in sorted list is the (numeric) id
+            # in python2.4 and up we would use sorted with a key...
+            a.append ((int (l [0].strip ("'")), l))
+        a.sort ()
+
+
+        last = 0
+        r = []
+        for n, l in a:
+            nodeid, jdate, user, action, params = map(eval, l)
+            assert (str(n) == nodeid)
+            if n != last:
+                if r:
+                    self.db.setjournal(self.classname, str(last), r)
+                last = n
+                r = []
+
+            if action == 'set':
+                for propname, value in params.iteritems():
+                    prop = properties[propname]
+                    if value is None:
+                        pass
+                    elif isinstance(prop, Date):
+                        value = date.Date(value)
+                    elif isinstance(prop, Interval):
+                        value = date.Interval(value)
+                    elif isinstance(prop, Password):
+                        value = password.JournalPassword(encrypted=value)
+                    params[propname] = value
+            elif action == 'create' and params:
+                # old tracker with data stored in the create!
+                params = {}
+            r.append((nodeid, date.Date(jdate), user, action, params))
+        if r:
+            self.db.setjournal(self.classname, nodeid, r)
+
+    #
+    # convenience methods
+    #
+    def get_roles(self, nodeid):
+        """Return iterator for all roles for this nodeid.
+
+           Yields string-processed roles.
+           This method can be overridden to provide a hook where we can
+           insert other permission models (e.g. get roles from database)
+           In standard schemas only a user has a roles property but
+           this may be different in customized schemas.
+           Note that this is the *central place* where role
+           processing happens!
+        """
+        node = self.db.getnode(self.classname, nodeid)
+        return iter_roles(node['roles'])
+
+    def has_role(self, nodeid, *roles):
+        '''See if this node has any roles that appear in roles.
+
+           For convenience reasons we take a list.
+           In standard schemas only a user has a roles property but
+           this may be different in customized schemas.
+        '''
+        roles = dict.fromkeys ([r.strip().lower() for r in roles])
+        for role in self.get_roles(nodeid):
+            if role in roles:
+                return True
+        return False
+
 
 class HyperdbValueError(ValueError):
     """ Error converting a raw value into a Hyperdb value """

Modified: tracker/roundup-src/roundup/init.py
==============================================================================
--- tracker/roundup-src/roundup/init.py	(original)
+++ tracker/roundup-src/roundup/init.py	Thu Aug  4 15:46:52 2011
@@ -176,10 +176,12 @@
     finally:
         f.close()
 
-def write_select_db(instance_home, backend):
+def write_select_db(instance_home, backend, dbdir = 'db'):
     ''' Write the file that selects the backend for the tracker
     '''
-    dbdir = os.path.join(instance_home, 'db')
+    # dbdir may be a relative pathname, os.path.join does the right
+    # thing when the second component of a join is an absolute path
+    dbdir = os.path.join (instance_home, dbdir)
     if not os.path.exists(dbdir):
         os.makedirs(dbdir)
     f = open(os.path.join(dbdir, 'backend_name'), 'w')

Modified: tracker/roundup-src/roundup/instance.py
==============================================================================
--- tracker/roundup-src/roundup/instance.py	(original)
+++ tracker/roundup-src/roundup/instance.py	Thu Aug  4 15:46:52 2011
@@ -16,9 +16,15 @@
 # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 #
 
-"""Tracker handling (open tracker).
+"""Top-level tracker interface.
 
-Backwards compatibility for the old-style "imported" trackers.
+Open a tracker with:
+
+    >>> from roundup import instance
+    >>> db = instance.open('path to tracker home')
+
+The "db" handle you get back is the tracker's hyperdb which has the interface
+described in `roundup.hyperdb.Database`.
 """
 __docformat__ = 'restructuredtext'
 
@@ -80,8 +86,7 @@
                 sys.path.remove(libdir)
 
     def get_backend_name(self):
-        o = __builtins__['open']
-        f = o(os.path.join(self.tracker_home, 'db', 'backend_name'))
+        f = file(os.path.join(self.config.DATABASE, 'backend_name'))
         name = f.readline().strip()
         f.close()
         return name
@@ -107,6 +112,9 @@
             'db': backend.Database(self.config, name)
         }
 
+        libdir = os.path.join(self.tracker_home, 'lib')
+        if os.path.isdir(libdir):
+            sys.path.insert(1, libdir)
         if self.optimize:
             # execute preloaded schema object
             exec(self.schema, vars)
@@ -115,9 +123,6 @@
             # use preloaded detectors
             detectors = self.detectors
         else:
-            libdir = os.path.join(self.tracker_home, 'lib')
-            if os.path.isdir(libdir):
-                sys.path.insert(1, libdir)
             # execute the schema file
             self._load_python('schema.py', vars)
             if callable (self.schema_hook):
@@ -126,8 +131,8 @@
             for extension in self.get_extensions('extensions'):
                 extension(self)
             detectors = self.get_extensions('detectors')
-            if libdir in sys.path:
-                sys.path.remove(libdir)
+        if libdir in sys.path:
+            sys.path.remove(libdir)
         db = vars['db']
         # apply the detectors
         for detector in detectors:

Modified: tracker/roundup-src/roundup/mailer.py
==============================================================================
--- tracker/roundup-src/roundup/mailer.py	(original)
+++ tracker/roundup-src/roundup/mailer.py	Thu Aug  4 15:46:52 2011
@@ -9,7 +9,7 @@
 from roundup import __version__
 from roundup.date import get_timezone
 
-from email.Utils import formatdate, formataddr
+from email.Utils import formatdate, formataddr, specialsre, escapesre
 from email.Message import Message
 from email.Header import Header
 from email.Charset import Charset
@@ -26,6 +26,25 @@
     del msg['Content-Transfer-Encoding']
     msg['Content-Transfer-Encoding'] = 'quoted-printable'
 
+def nice_sender_header(name, address, charset):
+    # construct an address header so it's as human-readable as possible
+    # even in the presence of a non-ASCII name part
+    if not name:
+        return address
+    try:
+        encname = name.encode('ASCII')
+    except UnicodeEncodeError:
+        # use Header to encode correctly.
+        encname = Header(name, charset=charset).encode()
+
+    # the important bits of formataddr()
+    if specialsre.search(encname):
+        encname = '"%s"'%escapesre.sub(r'\\\g<0>', encname)
+
+    # now format the header as a string - don't return a Header as anonymous
+    # headers play poorly with Messages (eg. won't get wrapped properly)
+    return '%s <%s>'%(encname, address)
+
 class Mailer:
     """Roundup-specific mail sending."""
     def __init__(self, config):
@@ -62,14 +81,15 @@
         charset = getattr(self.config, 'EMAIL_CHARSET', 'utf-8')
         tracker_name = unicode(self.config.TRACKER_NAME, 'utf-8')
         if not author:
-            author = formataddr((tracker_name, self.config.ADMIN_EMAIL))
+            author = (tracker_name, self.config.ADMIN_EMAIL)
+            name = author[0]
         else:
             name = unicode(author[0], 'utf-8')
             try:
                 name.encode('ascii')
             except UnicodeError:
                 name = Charset(charset).header_encode(name.encode(charset))
-            author = formataddr((name, author[1]))
+        author = nice_sender_header(name, author[1], charset)
 
         if multipart:
             message = MIMEMultipart()
@@ -82,9 +102,7 @@
         except UnicodeError:
             message['Subject'] = Header(subject, charset)
         message['To'] = ', '.join(to)
-        # This should not fail, since we already encoded non-ASCII
-        # name characters
-        message['From'] = author.encode('ascii')
+        message['From'] = author
         message['Date'] = formatdate(localtime=True)
 
         # add a Precedence header so autoresponders ignore us
@@ -181,17 +199,22 @@
         content = '\n'.join(traceback.format_exception(*sys.exc_info()))
         self.standard_message(to, subject, data+content)
 
-    def smtp_send(self, to, message):
+    def smtp_send(self, to, message, sender=None):
         """Send a message over SMTP, using roundup's config.
 
         Arguments:
         - to: a list of addresses usable by rfc822.parseaddr().
         - message: a StringIO instance with a full message.
+        - sender: if not 'None', the email address to use as the
+        envelope sender.  If 'None', the admin email is used.
         """
+
+        if not sender:
+            sender = self.config.ADMIN_EMAIL
         if self.debug:
             # don't send - just write to a file
             open(self.debug, 'a').write('FROM: %s\nTO: %s\n%s\n' %
-                                        (self.config.ADMIN_EMAIL,
+                                        (sender,
                                          ', '.join(to), message))
         else:
             # now try to send the message
@@ -199,7 +222,7 @@
                 # send the message as admin so bounces are sent there
                 # instead of to roundup
                 smtp = SMTPConnection(self.config)
-                smtp.sendmail(self.config.ADMIN_EMAIL, to, message)
+                smtp.sendmail(sender, to, message)
             except socket.error, value:
                 raise MessageSendError("Error: couldn't send email: "
                                        "mailhost %s"%value)

Modified: tracker/roundup-src/roundup/mailgw.py
==============================================================================
--- tracker/roundup-src/roundup/mailgw.py	(original)
+++ tracker/roundup-src/roundup/mailgw.py	Thu Aug  4 15:46:52 2011
@@ -27,6 +27,9 @@
    and given "file" class nodes that are linked to the "msg" node.
  . In a multipart/alternative message or part, we look for a text/plain
    subpart and ignore the other parts.
+ . A message/rfc822 is treated similarly to multipart/mixed (except for
+   special handling of the first text part) if unpack_rfc822 is set in
+   the mailgw config section.
 
 Summary
 -------
@@ -86,6 +89,7 @@
 from roundup import configuration, hyperdb, date, password, rfc2822, exceptions
 from roundup.mailer import Mailer, MessageSendError
 from roundup.i18n import _
+from roundup.hyperdb import iter_roles
 
 try:
     import pyme, pyme.core, pyme.gpgme
@@ -163,24 +167,6 @@
         yield sig
         sig = sig.next
 
-
-def iter_roles(roles):
-    ''' handle the text processing of turning the roles list
-        into something python can use more easily
-    '''
-    for role in [x.lower().strip() for x in roles.split(',')]:
-        yield role
-
-def user_has_role(db, userid, role_list):
-    ''' see if the given user has any roles that appear
-        in the role_list
-    '''
-    for role in iter_roles(db.user.get(userid, 'roles')):
-        if role in iter_roles(role_list):
-            return True
-    return False
-
-
 def check_pgp_sigs(sig, gpgctx, author):
     ''' Theoretically a PGP message can have several signatures. GPGME
         returns status on all signatures in a linked list. Walk that
@@ -261,41 +247,55 @@
             parts.append(part)
         return parts
 
-    def getheader(self, name, default=None):
-        hdr = mimetools.Message.getheader(self, name, default)
-        if not hdr:
-            return ''
-        if hdr:
-            hdr = hdr.replace('\n','') # Inserted by rfc822.readheaders
-        # historically this method has returned utf-8 encoded string
+    def _decode_header_to_utf8(self, hdr):
         l = []
+        prev_encoded = False
         for part, encoding in decode_header(hdr):
             if encoding:
                 part = part.decode(encoding)
+            # RFC 2047 specifies that between encoded parts spaces are
+            # swallowed while at the borders from encoded to non-encoded
+            # or vice-versa we must preserve a space. Multiple adjacent
+            # non-encoded parts should not occur.
+            if l and prev_encoded != bool(encoding):
+                l.append(' ')
+            prev_encoded = bool(encoding)
             l.append(part)
         return ''.join([s.encode('utf-8') for s in l])
 
+    def getheader(self, name, default=None):
+        hdr = mimetools.Message.getheader(self, name, default)
+        # TODO are there any other False values possible?
+        # TODO if not hdr: return hdr
+        if hdr is None:
+            return None
+        if not hdr:
+            return ''
+        if hdr:
+            hdr = hdr.replace('\n','') # Inserted by rfc822.readheaders
+        return self._decode_header_to_utf8(hdr)
+
     def getaddrlist(self, name):
         # overload to decode the name part of the address
         l = []
         for (name, addr) in mimetools.Message.getaddrlist(self, name):
-            p = []
-            for part, encoding in decode_header(name):
-                if encoding:
-                    part = part.decode(encoding)
-                p.append(part)
-            name = ''.join([s.encode('utf-8') for s in p])
+            name = self._decode_header_to_utf8(name)
             l.append((name, addr))
         return l
 
     def getname(self):
         """Find an appropriate name for this message."""
+        name = None
         if self.gettype() == 'message/rfc822':
             # handle message/rfc822 specially - the name should be
             # the subject of the actual e-mail embedded here
+            # we add a '.eml' extension like other email software does
             self.fp.seek(0)
-            name = Message(self.fp).getheader('subject')
-        else:
+            s = cStringIO.StringIO(self.getbody())
+            name = Message(s).getheader('subject')
+            if name:
+                name = name + '.eml'
+        if not name:
             # try name on Content-Type
             name = self.getparam('name')
             if not name:
@@ -368,8 +368,11 @@
     #   flagging.
     # multipart/form-data:
     #   For web forms only.
+    # message/rfc822:
+    #   Only if configured in [mailgw] unpack_rfc822
 
-    def extract_content(self, parent_type=None, ignore_alternatives = False):
+    def extract_content(self, parent_type=None, ignore_alternatives=False,
+        unpack_rfc822=False):
         """Extract the body and the attachments recursively.
 
            If the content is hidden inside a multipart/alternative part,
@@ -387,7 +390,7 @@
             ig = ignore_alternatives and not content_found
             for part in self.getparts():
                 new_content, new_attach = part.extract_content(content_type,
-                    not content and ig)
+                    not content and ig, unpack_rfc822)
 
                 # If we haven't found a text/plain part yet, take this one,
                 # otherwise make it an attachment.
@@ -412,6 +415,13 @@
                 attachments.extend(new_attach)
             if ig and content_type == 'multipart/alternative' and content:
                 attachments = []
+        elif unpack_rfc822 and content_type == 'message/rfc822':
+            s = cStringIO.StringIO(self.getbody())
+            m = Message(s)
+            ig = ignore_alternatives and not content
+            new_content, attachments = m.extract_content(m.gettype(), ig,
+                unpack_rfc822)
+            attachments.insert(0, m.text_as_attachment())
         elif (parent_type == 'multipart/signed' and
               content_type == 'application/pgp-signature'):
             # ignore it so it won't be saved as an attachment
@@ -522,877 +532,1079 @@
         result = context.op_verify_result()
         check_pgp_sigs(result.signatures, context, author)
 
-class MailGW:
+class parsedMessage:
 
-    def __init__(self, instance, arguments=()):
-        self.instance = instance
-        self.arguments = arguments
-        self.default_class = None
-        for option, value in self.arguments:
-            if option == '-c':
-                self.default_class = value.strip()
+    def __init__(self, mailgw, message):
+        self.mailgw = mailgw
+        self.config = mailgw.instance.config
+        self.db = mailgw.db
+        self.message = message
+        self.subject = message.getheader('subject', '')
+        self.has_prefix = False
+        self.matches = dict.fromkeys(['refwd', 'quote', 'classname',
+                                 'nodeid', 'title', 'args', 'argswhole'])
+        self.from_list = message.getaddrlist('resent-from') \
+                         or message.getaddrlist('from')
+        self.pfxmode = self.config['MAILGW_SUBJECT_PREFIX_PARSING']
+        self.sfxmode = self.config['MAILGW_SUBJECT_SUFFIX_PARSING']
+        # these are filled in by subsequent parsing steps
+        self.classname = None
+        self.properties = None
+        self.cl = None
+        self.nodeid = None
+        self.author = None
+        self.recipients = None
+        self.msg_props = {}
+        self.props = None
+        self.content = None
+        self.attachments = None
+
+    def handle_ignore(self):
+        ''' Check to see if message can be safely ignored:
+            detect loops and
+            Precedence: Bulk, or Microsoft Outlook autoreplies
+        '''
+        if self.message.getheader('x-roundup-loop', ''):
+            raise IgnoreLoop
+        if (self.message.getheader('precedence', '') == 'bulk'
+                or self.subject.lower().find("autoreply") > 0):
+            raise IgnoreBulk
 
-        self.mailer = Mailer(instance.config)
-        self.logger = logging.getLogger('mailgw')
+    def handle_help(self):
+        ''' Check to see if the message contains a usage/help request
+        '''
+        if self.subject.strip().lower() == 'help':
+            raise MailUsageHelp
 
-        # should we trap exceptions (normal usage) or pass them through
-        # (for testing)
-        self.trapExceptions = 1
+    def check_subject(self):
+        ''' Check to see if the message contains a valid subject line
+        '''
+        if not self.subject:
+            raise MailUsageError, _("""
+Emails to Roundup trackers must include a Subject: line!
+""")
 
-    def do_pipe(self):
-        """ Read a message from standard input and pass it to the mail handler.
+    def parse_subject(self):
+        ''' Matches subjects like:
+        Re: "[issue1234] title of issue [status=resolved]"
+        
+        Each part of the subject is matched, stored, then removed from the
+        start of the subject string as needed. The stored values are then
+        returned
+        '''
 
-            Read into an internal structure that we can seek on (in case
-            there's an error).
+        tmpsubject = self.subject
 
-            XXX: we may want to read this into a temporary file instead...
-        """
-        s = cStringIO.StringIO()
-        s.write(sys.stdin.read())
-        s.seek(0)
-        self.main(s)
-        return 0
+        sd_open, sd_close = self.config['MAILGW_SUBJECT_SUFFIX_DELIMITERS']
+        delim_open = re.escape(sd_open)
+        if delim_open in '[(': delim_open = '\\' + delim_open
+        delim_close = re.escape(sd_close)
+        if delim_close in '[(': delim_close = '\\' + delim_close
 
-    def do_mailbox(self, filename):
-        """ Read a series of messages from the specified unix mailbox file and
-            pass each to the mail handler.
-        """
-        # open the spool file and lock it
-        import fcntl
-        # FCNTL is deprecated in py2.3 and fcntl takes over all the symbols
-        if hasattr(fcntl, 'LOCK_EX'):
-            FCNTL = fcntl
+        # Look for Re: et. al. Used later on for MAILGW_SUBJECT_CONTENT_MATCH
+        re_re = r"(?P<refwd>%s)\s*" % self.config["MAILGW_REFWD_RE"].pattern
+        m = re.match(re_re, tmpsubject, re.IGNORECASE|re.VERBOSE|re.UNICODE)
+        if m:
+            m = m.groupdict()
+            if m['refwd']:
+                self.matches.update(m)
+                tmpsubject = tmpsubject[len(m['refwd']):] # Consume Re:
+
+        # Look for Leading "
+        m = re.match(r'(?P<quote>\s*")', tmpsubject,
+                     re.IGNORECASE)
+        if m:
+            self.matches.update(m.groupdict())
+            tmpsubject = tmpsubject[len(self.matches['quote']):] # Consume quote
+
+        # Check if the subject includes a prefix
+        self.has_prefix = re.search(r'^%s(\w+)%s'%(delim_open,
+            delim_close), tmpsubject.strip())
+
+        # Match the classname if specified
+        class_re = r'%s(?P<classname>(%s))(?P<nodeid>\d+)?%s'%(delim_open,
+            "|".join(self.db.getclasses()), delim_close)
+        # Note: re.search, not re.match as there might be garbage
+        # (mailing list prefix, etc.) before the class identifier
+        m = re.search(class_re, tmpsubject, re.IGNORECASE)
+        if m:
+            self.matches.update(m.groupdict())
+            # Skip to the end of the class identifier, including any
+            # garbage before it.
+
+            tmpsubject = tmpsubject[m.end():]
+
+        # Match the title of the subject
+        # if we've not found a valid classname prefix then force the
+        # scanning to handle there being a leading delimiter
+        title_re = r'(?P<title>%s[^%s]*)'%(
+            not self.matches['classname'] and '.' or '', delim_open)
+        m = re.match(title_re, tmpsubject.strip(), re.IGNORECASE)
+        if m:
+            self.matches.update(m.groupdict())
+            tmpsubject = tmpsubject[len(self.matches['title']):] # Consume title
+
+        if self.matches['title']:
+            self.matches['title'] = self.matches['title'].strip()
         else:
-            import FCNTL
-        f = open(filename, 'r+')
-        fcntl.flock(f.fileno(), FCNTL.LOCK_EX)
+            self.matches['title'] = ''
 
-        # handle and clear the mailbox
-        try:
-            from mailbox import UnixMailbox
-            mailbox = UnixMailbox(f, factory=Message)
-            # grab one message
-            message = mailbox.next()
-            while message:
-                # handle this message
-                self.handle_Message(message)
-                message = mailbox.next()
-            # nuke the file contents
-            os.ftruncate(f.fileno(), 0)
-        except:
-            import traceback
-            traceback.print_exc()
-            return 1
-        fcntl.flock(f.fileno(), FCNTL.LOCK_UN)
+        # strip off the quotes that dumb emailers put around the subject, like
+        #      Re: "[issue1] bla blah"
+        if self.matches['quote'] and self.matches['title'].endswith('"'):
+            self.matches['title'] = self.matches['title'][:-1]
+        
+        # Match any arguments specified
+        args_re = r'(?P<argswhole>%s(?P<args>.+?)%s)?'%(delim_open,
+            delim_close)
+        m = re.search(args_re, tmpsubject.strip(), re.IGNORECASE|re.VERBOSE)
+        if m:
+            self.matches.update(m.groupdict())
+
+    def rego_confirm(self):
+        ''' Check for registration OTK and confirm the registration if found
+        '''
+        
+        if self.config['EMAIL_REGISTRATION_CONFIRMATION']:
+            otk_re = re.compile('-- key (?P<otk>[a-zA-Z0-9]{32})')
+            otk = otk_re.search(self.matches['title'] or '')
+            if otk:
+                self.db.confirm_registration(otk.group('otk'))
+                subject = 'Your registration to %s is complete' % \
+                          self.config['TRACKER_NAME']
+                sendto = [self.from_list[0][1]]
+                self.mailgw.mailer.standard_message(sendto, subject, '')
+                return 1
         return 0
 
-    def do_imap(self, server, user='', password='', mailbox='', ssl=0):
-        ''' Do an IMAP connection
+    def get_classname(self):
+        ''' Determine the classname of the node being created/edited
         '''
-        import getpass, imaplib, socket
-        try:
-            if not user:
-                user = raw_input('User: ')
-            if not password:
-                password = getpass.getpass()
-        except (KeyboardInterrupt, EOFError):
-            # Ctrl C or D maybe also Ctrl Z under Windows.
-            print "\nAborted by user."
-            return 1
-        # open a connection to the server and retrieve all messages
-        try:
-            if ssl:
-                self.logger.debug('Trying server %r with ssl'%server)
-                server = imaplib.IMAP4_SSL(server)
-            else:
-                self.logger.debug('Trying server %r without ssl'%server)
-                server = imaplib.IMAP4(server)
-        except (imaplib.IMAP4.error, socket.error, socket.sslerror):
-            self.logger.exception('IMAP server error')
-            return 1
+        subject = self.subject
 
-        try:
-            server.login(user, password)
-        except imaplib.IMAP4.error, e:
-            self.logger.exception('IMAP login failure')
-            return 1
+        # get the classname
+        if self.pfxmode == 'none':
+            classname = None
+        else:
+            classname = self.matches['classname']
 
-        try:
-            if not mailbox:
-                (typ, data) = server.select()
-            else:
-                (typ, data) = server.select(mailbox=mailbox)
-            if typ != 'OK':
-                self.logger.error('Failed to get mailbox %r: %s'%(mailbox,
-                    data))
-                return 1
-            try:
-                numMessages = int(data[0])
-            except ValueError, value:
-                self.logger.error('Invalid message count from mailbox %r'%
-                    data[0])
-                return 1
-            for i in range(1, numMessages+1):
-                (typ, data) = server.fetch(str(i), '(RFC822)')
+        if not classname and self.has_prefix and self.pfxmode == 'strict':
+            raise MailUsageError, _("""
+The message you sent to roundup did not contain a properly formed subject
+line. The subject must contain a class name or designator to indicate the
+'topic' of the message. For example:
+    Subject: [issue] This is a new issue
+      - this will create a new issue in the tracker with the title 'This is
+        a new issue'.
+    Subject: [issue1234] This is a followup to issue 1234
+      - this will append the message's contents to the existing issue 1234
+        in the tracker.
 
-                # mark the message as deleted.
-                server.store(str(i), '+FLAGS', r'(\Deleted)')
+Subject was: '%(subject)s'
+""") % locals()
 
-                # process the message
-                s = cStringIO.StringIO(data[0][1])
-                s.seek(0)
-                self.handle_Message(Message(s))
-            server.close()
-        finally:
+        # try to get the class specified - if "loose" or "none" then fall
+        # back on the default
+        attempts = []
+        if classname:
+            attempts.append(classname)
+
+        if self.mailgw.default_class:
+            attempts.append(self.mailgw.default_class)
+        else:
+            attempts.append(self.config['MAILGW_DEFAULT_CLASS'])
+
+        # first valid class name wins
+        self.cl = None
+        for trycl in attempts:
             try:
-                server.expunge()
-            except:
+                self.cl = self.db.getclass(trycl)
+                classname = self.classname = trycl
+                break
+            except KeyError:
                 pass
-            server.logout()
-
-        return 0
 
+        if not self.cl:
+            validname = ', '.join(self.db.getclasses())
+            if classname:
+                raise MailUsageError, _("""
+The class name you identified in the subject line ("%(classname)s") does
+not exist in the database.
 
-    def do_apop(self, server, user='', password='', ssl=False):
-        ''' Do authentication POP
-        '''
-        self._do_pop(server, user, password, True, ssl)
+Valid class names are: %(validname)s
+Subject was: "%(subject)s"
+""") % locals()
+            else:
+                raise MailUsageError, _("""
+You did not identify a class name in the subject line and there is no
+default set for this tracker. The subject must contain a class name or
+designator to indicate the 'topic' of the message. For example:
+    Subject: [issue] This is a new issue
+      - this will create a new issue in the tracker with the title 'This is
+        a new issue'.
+    Subject: [issue1234] This is a followup to issue 1234
+      - this will append the message's contents to the existing issue 1234
+        in the tracker.
 
-    def do_pop(self, server, user='', password='', ssl=False):
-        ''' Do plain POP
-        '''
-        self._do_pop(server, user, password, False, ssl)
+Subject was: '%(subject)s'
+""") % locals()
+        # get the class properties
+        self.properties = self.cl.getprops()
+        
 
-    def _do_pop(self, server, user, password, apop, ssl):
-        '''Read a series of messages from the specified POP server.
+    def get_nodeid(self):
+        ''' Determine the nodeid from the message and return it if found
         '''
-        import getpass, poplib, socket
-        try:
-            if not user:
-                user = raw_input('User: ')
-            if not password:
-                password = getpass.getpass()
-        except (KeyboardInterrupt, EOFError):
-            # Ctrl C or D maybe also Ctrl Z under Windows.
-            print "\nAborted by user."
-            return 1
+        title = self.matches['title']
+        subject = self.subject
+        
+        if self.pfxmode == 'none':
+            nodeid = None
+        else:
+            nodeid = self.matches['nodeid']
 
-        # open a connection to the server and retrieve all messages
-        try:
-            if ssl:
-                klass = poplib.POP3_SSL
-            else:
-                klass = poplib.POP3
-            server = klass(server)
-        except socket.error:
-            self.logger.exception('POP server error')
-            return 1
-        if apop:
-            server.apop(user, password)
-        else:
-            server.user(user)
-            server.pass_(password)
-        numMessages = len(server.list()[1])
-        for i in range(1, numMessages+1):
-            # retr: returns
-            # [ pop response e.g. '+OK 459 octets',
-            #   [ array of message lines ],
-            #   number of octets ]
-            lines = server.retr(i)[1]
-            s = cStringIO.StringIO('\n'.join(lines))
-            s.seek(0)
-            self.handle_Message(Message(s))
-            # delete the message
-            server.dele(i)
+        # try in-reply-to to match the message if there's no nodeid
+        inreplyto = self.message.getheader('in-reply-to') or ''
+        if nodeid is None and inreplyto:
+            l = self.db.getclass('msg').stringFind(messageid=inreplyto)
+            if l:
+                nodeid = self.cl.filter(None, {'messages':l})[0]
 
-        # quit the server to commit changes.
-        server.quit()
-        return 0
 
-    def main(self, fp):
-        ''' fp - the file from which to read the Message.
-        '''
-        return self.handle_Message(Message(fp))
+        # but we do need either a title or a nodeid...
+        if nodeid is None and not title:
+            raise MailUsageError, _("""
+I cannot match your message to a node in the database - you need to either
+supply a full designator (with number, eg "[issue123]") or keep the
+previous subject title intact so I can match that.
 
-    def handle_Message(self, message):
-        """Handle an RFC822 Message
+Subject was: "%(subject)s"
+""") % locals()
 
-        Handle the Message object by calling handle_message() and then cope
-        with any errors raised by handle_message.
-        This method's job is to make that call and handle any
-        errors in a sane manner. It should be replaced if you wish to
-        handle errors in a different manner.
-        """
-        # in some rare cases, a particularly stuffed-up e-mail will make
-        # its way into here... try to handle it gracefully
+        # If there's no nodeid, check to see if this is a followup and
+        # maybe someone's responded to the initial mail that created an
+        # entry. Try to find the matching nodes with the same title, and
+        # use the _last_ one matched (since that'll _usually_ be the most
+        # recent...). The subject_content_match config may specify an
+        # additional restriction based on the matched node's creation or
+        # activity.
+        tmatch_mode = self.config['MAILGW_SUBJECT_CONTENT_MATCH']
+        if tmatch_mode != 'never' and nodeid is None and self.matches['refwd']:
+            l = self.cl.stringFind(title=title)
+            limit = None
+            if (tmatch_mode.startswith('creation') or
+                    tmatch_mode.startswith('activity')):
+                limit, interval = tmatch_mode.split(' ', 1)
+                threshold = date.Date('.') - date.Interval(interval)
+            for id in l:
+                if limit:
+                    if threshold < self.cl.get(id, limit):
+                        nodeid = id
+                else:
+                    nodeid = id
 
-        sendto = message.getaddrlist('resent-from')
-        if not sendto:
-            sendto = message.getaddrlist('from')
-        if not sendto:
-            # very bad-looking message - we don't even know who sent it
-            msg = ['Badly formed message from mail gateway. Headers:']
-            msg.extend(message.headers)
-            msg = '\n'.join(map(str, msg))
-            self.logger.error(msg)
-            return
+        # if a nodeid was specified, make sure it's valid
+        if nodeid is not None and not self.cl.hasnode(nodeid):
+            if self.pfxmode == 'strict':
+                raise MailUsageError, _("""
+The node specified by the designator in the subject of your message
+("%(nodeid)s") does not exist.
 
-        msg = 'Handling message'
-        if message.getheader('message-id'):
-            msg += ' (Message-id=%r)'%message.getheader('message-id')
-        self.logger.info(msg)
+Subject was: "%(subject)s"
+""") % locals()
+            else:
+                nodeid = None
+        self.nodeid = nodeid
 
-        # try normal message-handling
-        if not self.trapExceptions:
-            return self.handle_message(message)
+    def get_author_id(self):
+        ''' Attempt to get the author id from the existing registered users,
+            otherwise attempt to register a new user and return their id
+        '''
+        # Don't create users if anonymous isn't allowed to register
+        create = 1
+        anonid = self.db.user.lookup('anonymous')
+        if not (self.db.security.hasPermission('Register', anonid, 'user')
+                and self.db.security.hasPermission('Email Access', anonid)):
+            create = 0
 
-        # no, we want to trap exceptions
-        try:
-            return self.handle_message(message)
-        except MailUsageHelp:
-            # bounce the message back to the sender with the usage message
-            fulldoc = '\n'.join(string.split(__doc__, '\n')[2:])
-            m = ['']
-            m.append('\n\nMail Gateway Help\n=================')
-            m.append(fulldoc)
-            self.mailer.bounce_message(message, [sendto[0][1]], m,
-                subject="Mail Gateway Help")
-        except MailUsageError, value:
-            # bounce the message back to the sender with the usage message
-            fulldoc = '\n'.join(string.split(__doc__, '\n')[2:])
-            m = ['']
-            m.append(str(value))
-            m.append('\n\nMail Gateway Help\n=================')
-            m.append(fulldoc)
-            self.mailer.bounce_message(message, [sendto[0][1]], m)
-        except Unauthorized, value:
-            # just inform the user that he is not authorized
-            m = ['']
-            m.append(str(value))
-            self.mailer.bounce_message(message, [sendto[0][1]], m)
-        except IgnoreMessage:
-            # do not take any action
-            # this exception is thrown when email should be ignored
-            msg = 'IgnoreMessage raised'
-            if message.getheader('message-id'):
-                msg += ' (Message-id=%r)'%message.getheader('message-id')
-            self.logger.info(msg)
-            return
-        except:
-            msg = 'Exception handling message'
-            if message.getheader('message-id'):
-                msg += ' (Message-id=%r)'%message.getheader('message-id')
-            self.logger.exception(msg)
+        # ok, now figure out who the author is - create a new user if the
+        # "create" flag is true
+        author = uidFromAddress(self.db, self.from_list[0], create=create)
 
-            # bounce the message back to the sender with the error message
-            # let the admin know that something very bad is happening
-            m = ['']
-            m.append('An unexpected error occurred during the processing')
-            m.append('of your message. The tracker administrator is being')
-            m.append('notified.\n')
-            self.mailer.bounce_message(message, [sendto[0][1]], m)
+        # if we're not recognised, and we don't get added as a user, then we
+        # must be anonymous
+        if not author:
+            author = anonid
 
-            m.append('----------------')
-            m.append(traceback.format_exc())
-            self.mailer.bounce_message(message, [self.instance.config.ADMIN_EMAIL], m)
+        # make sure the author has permission to use the email interface
+        if not self.db.security.hasPermission('Email Access', author):
+            if author == anonid:
+                # we're anonymous and we need to be a registered user
+                from_address = self.from_list[0][1]
+                registration_info = ""
+                if self.db.security.hasPermission('Web Access', author) and \
+                   self.db.security.hasPermission('Register', anonid, 'user'):
+                    tracker_web = self.config.TRACKER_WEB
+                    registration_info = """ Please register at:
 
-    def handle_message(self, message):
-        ''' message - a Message instance
+%(tracker_web)suser?template=register
 
-        Parse the message as per the module docstring.
-        '''
-        # get database handle for handling one email
-        self.db = self.instance.open ('admin')
-        try:
-            return self._handle_message (message)
-        finally:
-            self.db.close()
+...before sending mail to the tracker.""" % locals()
 
-    def _handle_message(self, message):
-        ''' message - a Message instance
+                raise Unauthorized, _("""
+You are not a registered user.%(registration_info)s
 
-        Parse the message as per the module docstring.
+Unknown address: %(from_address)s
+""") % locals()
+            else:
+                # we're registered and we're _still_ not allowed access
+                raise Unauthorized, _(
+                    'You are not permitted to access this tracker.')
+        self.author = author
 
-        The implementation expects an opened database and a try/finally
-        that closes the database.
+    def check_node_permissions(self):
+        ''' Check if the author has permission to edit or create this
+            class of node
         '''
-        # detect loops
-        if message.getheader('x-roundup-loop', ''):
-            raise IgnoreLoop
-
-        # handle the subject line
-        subject = message.getheader('subject', '')
-        if not subject:
-            raise MailUsageError, _("""
-Emails to Roundup trackers must include a Subject: line!
-""")
+        if self.nodeid:
+            if not self.db.security.hasPermission('Edit', self.author,
+                    self.classname, itemid=self.nodeid):
+                raise Unauthorized, _(
+                    'You are not permitted to edit %(classname)s.'
+                    ) % self.__dict__
+        else:
+            if not self.db.security.hasPermission('Create', self.author,
+                    self.classname):
+                raise Unauthorized, _(
+                    'You are not permitted to create %(classname)s.'
+                    ) % self.__dict__
 
-        # detect Precedence: Bulk, or Microsoft Outlook autoreplies
-        if (message.getheader('precedence', '') == 'bulk'
-                or subject.lower().find("autoreply") > 0):
-            raise IgnoreBulk
+    def commit_and_reopen_as_author(self):
+        ''' the author may have been created - make sure the change is
+            committed before we reopen the database
+            then re-open the database as the author
+        '''
+        self.db.commit()
 
-        if subject.strip().lower() == 'help':
-            raise MailUsageHelp
+        # set the database user as the author
+        username = self.db.user.get(self.author, 'username')
+        self.db.setCurrentUser(username)
 
-        # config is used many times in this method.
-        # make local variable for easier access
-        config = self.instance.config
-
-        # determine the sender's address
-        from_list = message.getaddrlist('resent-from')
-        if not from_list:
-            from_list = message.getaddrlist('from')
+        # re-get the class with the new database connection
+        self.cl = self.db.getclass(self.classname)
 
-        # XXX Don't enable. This doesn't work yet.
-#  "[^A-z.]tracker\+(?P<classname>[^\d\s]+)(?P<nodeid>\d+)\@some.dom.ain[^A-z.]"
-        # handle delivery to addresses like:tracker+issue25 at some.dom.ain
-        # use the embedded issue number as our issue
-#        issue_re = config['MAILGW_ISSUE_ADDRESS_RE']
-#        if issue_re:
-#            for header in ['to', 'cc', 'bcc']:
-#                addresses = message.getheader(header, '')
-#            if addresses:
-#              # FIXME, this only finds the first match in the addresses.
-#                issue = re.search(issue_re, addresses, 'i')
-#                if issue:
-#                    classname = issue.group('classname')
-#                    nodeid = issue.group('nodeid')
-#                    break
-
-        # Matches subjects like:
-        # Re: "[issue1234] title of issue [status=resolved]"
-
-        # Alias since we need a reference to the original subject for
-        # later use in error messages
-        tmpsubject = subject
+    def get_recipients(self):
+        ''' Get the list of recipients who were included in message and
+            register them as users if possible
+        '''
+        # Don't create users if anonymous isn't allowed to register
+        create = 1
+        anonid = self.db.user.lookup('anonymous')
+        if not (self.db.security.hasPermission('Register', anonid, 'user')
+                and self.db.security.hasPermission('Email Access', anonid)):
+            create = 0
 
-        sd_open, sd_close = config['MAILGW_SUBJECT_SUFFIX_DELIMITERS']
-        delim_open = re.escape(sd_open)
-        if delim_open in '[(': delim_open = '\\' + delim_open
-        delim_close = re.escape(sd_close)
-        if delim_close in '[(': delim_close = '\\' + delim_close
+        # get the user class arguments from the commandline
+        user_props = self.mailgw.get_class_arguments('user')
 
-        matches = dict.fromkeys(['refwd', 'quote', 'classname',
-                                 'nodeid', 'title', 'args',
-                                 'argswhole'])
+        # now update the recipients list
+        recipients = []
+        tracker_email = self.config['TRACKER_EMAIL'].lower()
+        msg_to = self.message.getaddrlist('to')
+        msg_cc = self.message.getaddrlist('cc')
+        for recipient in msg_to + msg_cc:
+            r = recipient[1].strip().lower()
+            if r == tracker_email or not r:
+                continue
 
-        # Look for Re: et. al. Used later on for MAILGW_SUBJECT_CONTENT_MATCH
-        re_re = r"(?P<refwd>%s)\s*" % config["MAILGW_REFWD_RE"].pattern
-        m = re.match(re_re, tmpsubject, re.IGNORECASE|re.VERBOSE|re.UNICODE)
-        if m:
-            m = m.groupdict()
-            if m['refwd']:
-                matches.update(m)
-                tmpsubject = tmpsubject[len(m['refwd']):] # Consume Re:
+            # look up the recipient - create if necessary (and we're
+            # allowed to)
+            recipient = uidFromAddress(self.db, recipient, create, **user_props)
 
-        # Look for Leading "
-        m = re.match(r'(?P<quote>\s*")', tmpsubject,
-                     re.IGNORECASE)
-        if m:
-            matches.update(m.groupdict())
-            tmpsubject = tmpsubject[len(matches['quote']):] # Consume quote
+            # if all's well, add the recipient to the list
+            if recipient:
+                recipients.append(recipient)
+        self.recipients = recipients
 
-        has_prefix = re.search(r'^%s(\w+)%s'%(delim_open,
-            delim_close), tmpsubject.strip())
+    def get_props(self):
+        ''' Generate all the props for the new/updated node and return them
+        '''
+        subject = self.subject
+        
+        # get the commandline arguments for issues
+        issue_props = self.mailgw.get_class_arguments('issue', self.classname)
+        
+        #
+        # handle the subject argument list
+        #
+        # figure what the properties of this Class are
+        props = {}
+        args = self.matches['args']
+        argswhole = self.matches['argswhole']
+        title = self.matches['title']
+        
+        # Reform the title 
+        if self.matches['nodeid'] and self.nodeid is None:
+            title = subject
+        
+        if args:
+            if self.sfxmode == 'none':
+                title += ' ' + argswhole
+            else:
+                errors, props = setPropArrayFromString(self, self.cl, args,
+                    self.nodeid)
+                # handle any errors parsing the argument list
+                if errors:
+                    if self.sfxmode == 'strict':
+                        errors = '\n- '.join(map(str, errors))
+                        raise MailUsageError, _("""
+There were problems handling your subject line argument list:
+- %(errors)s
 
-        class_re = r'%s(?P<classname>(%s))(?P<nodeid>\d+)?%s'%(delim_open,
-            "|".join(self.db.getclasses()), delim_close)
-        # Note: re.search, not re.match as there might be garbage
-        # (mailing list prefix, etc.) before the class identifier
-        m = re.search(class_re, tmpsubject, re.IGNORECASE)
-        if m:
-            matches.update(m.groupdict())
-            # Skip to the end of the class identifier, including any
-            # garbage before it.
+Subject was: "%(subject)s"
+""") % locals()
+                    else:
+                        title += ' ' + argswhole
 
-            tmpsubject = tmpsubject[m.end():]
 
-        # if we've not found a valid classname prefix then force the
-        # scanning to handle there being a leading delimiter
-        title_re = r'(?P<title>%s[^%s]+)'%(
-            not matches['classname'] and '.' or '', delim_open)
-        m = re.match(title_re, tmpsubject.strip(), re.IGNORECASE)
-        if m:
-            matches.update(m.groupdict())
-            tmpsubject = tmpsubject[len(matches['title']):] # Consume title
+        # set the issue title to the subject
+        title = title.strip()
+        if (title and self.properties.has_key('title') and not
+                issue_props.has_key('title')):
+            issue_props['title'] = title
+        if (self.nodeid and self.properties.has_key('title') and not
+                self.config['MAILGW_SUBJECT_UPDATES_TITLE']):
+            issue_props['title'] = self.cl.get(self.nodeid,'title')
+
+        # merge the command line props defined in issue_props into
+        # the props dictionary because function(**props, **issue_props)
+        # is a syntax error.
+        for prop in issue_props.keys() :
+            if not props.has_key(prop) :
+                props[prop] = issue_props[prop]
+
+        self.props = props
+
+    def get_pgp_message(self):
+        ''' If they've enabled PGP processing then verify the signature
+            or decrypt the message
+        '''
+        def pgp_role():
+            """ if PGP_ROLES is specified the user must have a Role in the list
+                or we will skip PGP processing
+            """
+            if self.config.PGP_ROLES:
+                return self.db.user.has_role(self.author,
+                    iter_roles(self.config.PGP_ROLES))
+            else:
+                return True
 
-        args_re = r'(?P<argswhole>%s(?P<args>.+?)%s)?'%(delim_open,
-            delim_close)
-        m = re.search(args_re, tmpsubject.strip(), re.IGNORECASE|re.VERBOSE)
-        if m:
-            matches.update(m.groupdict())
+        if self.config.PGP_ENABLE and pgp_role():
+            assert pyme, 'pyme is not installed'
+            # signed/encrypted mail must come from the primary address
+            author_address = self.db.user.get(self.author, 'address')
+            if self.config.PGP_HOMEDIR:
+                os.environ['GNUPGHOME'] = self.config.PGP_HOMEDIR
+            if self.message.pgp_signed():
+                self.message.verify_signature(author_address)
+            elif self.message.pgp_encrypted():
+                # replace message with the contents of the decrypted
+                # message for content extraction
+                # TODO: encrypted message handling is far from perfect
+                # bounces probably include the decrypted message, for
+                # instance :(
+                self.message = self.message.decrypt(author_address)
+            else:
+                raise MailUsageError, _("""
+This tracker has been configured to require all email be PGP signed or
+encrypted.""")
 
-        # figure subject line parsing modes
-        pfxmode = config['MAILGW_SUBJECT_PREFIX_PARSING']
-        sfxmode = config['MAILGW_SUBJECT_SUFFIX_PARSING']
+    def get_content_and_attachments(self):
+        ''' get the attachments and first text part from the message
+        '''
+        ig = self.config.MAILGW_IGNORE_ALTERNATIVES
+        self.content, self.attachments = self.message.extract_content(
+            ignore_alternatives=ig,
+            unpack_rfc822=self.config.MAILGW_UNPACK_RFC822)
+        
 
-        # check for registration OTK
-        # or fallback on the default class
-        if self.db.config['EMAIL_REGISTRATION_CONFIRMATION']:
-            otk_re = re.compile('-- key (?P<otk>[a-zA-Z0-9]{32})')
-            otk = otk_re.search(matches['title'] or '')
-            if otk:
-                self.db.confirm_registration(otk.group('otk'))
-                subject = 'Your registration to %s is complete' % \
-                          config['TRACKER_NAME']
-                sendto = [from_list[0][1]]
-                self.mailer.standard_message(sendto, subject, '')
-                return
+    def create_files(self):
+        ''' Create a file for each attachment in the message
+        '''
+        if not self.properties.has_key('files'):
+            return
+        files = []
+        file_props = self.mailgw.get_class_arguments('file')
+        
+        if self.attachments:
+            for (name, mime_type, data) in self.attachments:
+                if not self.db.security.hasPermission('Create', self.author,
+                    'file'):
+                    raise Unauthorized, _(
+                        'You are not permitted to create files.')
+                if not name:
+                    name = "unnamed"
+                try:
+                    fileid = self.db.file.create(type=mime_type, name=name,
+                         content=data, **file_props)
+                except exceptions.Reject:
+                    pass
+                else:
+                    files.append(fileid)
+            # allowed to attach the files to an existing node?
+            if self.nodeid and not self.db.security.hasPermission('Edit',
+                    self.author, self.classname, 'files'):
+                raise Unauthorized, _(
+                    'You are not permitted to add files to %(classname)s.'
+                    ) % self.__dict__
 
-        # get the classname
-        if pfxmode == 'none':
-            classname = None
-        else:
-            classname = matches['classname']
+            self.msg_props['files'] = files
+            if self.nodeid:
+                # extend the existing files list
+                fileprop = self.cl.get(self.nodeid, 'files')
+                fileprop.extend(files)
+                files = fileprop
 
-        if not classname and has_prefix and pfxmode == 'strict':
-            raise MailUsageError, _("""
-The message you sent to roundup did not contain a properly formed subject
-line. The subject must contain a class name or designator to indicate the
-'topic' of the message. For example:
-    Subject: [issue] This is a new issue
-      - this will create a new issue in the tracker with the title 'This is
-        a new issue'.
-    Subject: [issue1234] This is a followup to issue 1234
-      - this will append the message's contents to the existing issue 1234
-        in the tracker.
+            self.props['files'] = files
 
-Subject was: '%(subject)s'
-""") % locals()
+    def create_msg(self):
+        ''' Create msg containing all the relevant information from the message
+        '''
+        if not self.properties.has_key('messages'):
+            return
+        msg_props = self.mailgw.get_class_arguments('msg')
+        self.msg_props.update (msg_props)
+        
+        # Get the message ids
+        inreplyto = self.message.getheader('in-reply-to') or ''
+        messageid = self.message.getheader('message-id')
+        # generate a messageid if there isn't one
+        if not messageid:
+            messageid = "<%s.%s.%s%s@%s>"%(time.time(), random.random(),
+                self.classname, self.nodeid, self.config['MAIL_DOMAIN'])
+        
+        if self.content is None:
+            raise MailUsageError, _("""
+Roundup requires the submission to be plain text. The message parser could
+not find a text/plain part to use.
+""")
 
-        # try to get the class specified - if "loose" or "none" then fall
-        # back on the default
-        attempts = []
-        if classname:
-            attempts.append(classname)
+        # parse the body of the message, stripping out bits as appropriate
+        summary, content = parseContent(self.content, config=self.config)
+        content = content.strip()
 
-        if self.default_class:
-            attempts.append(self.default_class)
-        else:
-            attempts.append(config['MAILGW_DEFAULT_CLASS'])
+        if content:
+            if not self.db.security.hasPermission('Create', self.author, 'msg'):
+                raise Unauthorized, _(
+                    'You are not permitted to create messages.')
 
-        # first valid class name wins
-        cl = None
-        for trycl in attempts:
             try:
-                cl = self.db.getclass(trycl)
-                classname = trycl
-                break
-            except KeyError:
-                pass
-
-        if not cl:
-            validname = ', '.join(self.db.getclasses())
-            if classname:
+                message_id = self.db.msg.create(author=self.author,
+                    recipients=self.recipients, date=date.Date('.'),
+                    summary=summary, content=content,
+                    messageid=messageid, inreplyto=inreplyto, **self.msg_props)
+            except exceptions.Reject, error:
                 raise MailUsageError, _("""
-The class name you identified in the subject line ("%(classname)s") does
-not exist in the database.
-
-Valid class names are: %(validname)s
-Subject was: "%(subject)s"
+Mail message was rejected by a detector.
+%(error)s
 """) % locals()
+            # allowed to attach the message to the existing node?
+            if self.nodeid and not self.db.security.hasPermission('Edit',
+                    self.author, self.classname, 'messages'):
+                raise Unauthorized, _(
+                    'You are not permitted to add messages to %(classname)s.'
+                    ) % self.__dict__
+
+            if self.nodeid:
+                # add the message to the node's list
+                messages = self.cl.get(self.nodeid, 'messages')
+                messages.append(message_id)
+                self.props['messages'] = messages
             else:
-                raise MailUsageError, _("""
-You did not identify a class name in the subject line and there is no
-default set for this tracker. The subject must contain a class name or
-designator to indicate the 'topic' of the message. For example:
-    Subject: [issue] This is a new issue
-      - this will create a new issue in the tracker with the title 'This is
-        a new issue'.
-    Subject: [issue1234] This is a followup to issue 1234
-      - this will append the message's contents to the existing issue 1234
-        in the tracker.
+                # pre-load the messages list
+                self.props['messages'] = [message_id]
 
-Subject was: '%(subject)s'
+    def create_node(self):
+        ''' Create/update a node using self.props 
+        '''
+        classname = self.classname
+        try:
+            if self.nodeid:
+                # Check permissions for each property
+                for prop in self.props.keys():
+                    if not self.db.security.hasPermission('Edit', self.author,
+                            classname, prop):
+                        raise Unauthorized, _('You are not permitted to edit '
+                            'property %(prop)s of class %(classname)s.'
+                            ) % locals()
+                self.cl.set(self.nodeid, **self.props)
+            else:
+                # Check permissions for each property
+                for prop in self.props.keys():
+                    if not self.db.security.hasPermission('Create', self.author,
+                            classname, prop):
+                        raise Unauthorized, _('You are not permitted to set '
+                            'property %(prop)s of class %(classname)s.'
+                            ) % locals()
+                self.nodeid = self.cl.create(**self.props)
+        except (TypeError, IndexError, ValueError, exceptions.Reject), message:
+            raise MailUsageError, _("""
+There was a problem with the message you sent:
+   %(message)s
 """) % locals()
 
-        # get the optional nodeid
-        if pfxmode == 'none':
-            nodeid = None
-        else:
-            nodeid = matches['nodeid']
+        return self.nodeid
 
-        # try in-reply-to to match the message if there's no nodeid
-        inreplyto = message.getheader('in-reply-to') or ''
-        if nodeid is None and inreplyto:
-            l = self.db.getclass('msg').stringFind(messageid=inreplyto)
-            if l:
-                nodeid = cl.filter(None, {'messages':l})[0]
 
-        # title is optional too
-        title = matches['title']
-        if title:
-            title = title.strip()
-        else:
-            title = ''
 
-        # strip off the quotes that dumb emailers put around the subject, like
-        #      Re: "[issue1] bla blah"
-        if matches['quote'] and title.endswith('"'):
-            title = title[:-1]
+class MailGW:
 
-        # but we do need either a title or a nodeid...
-        if nodeid is None and not title:
-            raise MailUsageError, _("""
-I cannot match your message to a node in the database - you need to either
-supply a full designator (with number, eg "[issue123]") or keep the
-previous subject title intact so I can match that.
+    # To override the message parsing, derive your own class from
+    # parsedMessage and assign to parsed_message_class in a derived
+    # class of MailGW
+    parsed_message_class = parsedMessage
 
-Subject was: "%(subject)s"
-""") % locals()
+    def __init__(self, instance, arguments=()):
+        self.instance = instance
+        self.arguments = arguments
+        self.default_class = None
+        for option, value in self.arguments:
+            if option == '-c':
+                self.default_class = value.strip()
 
-        # If there's no nodeid, check to see if this is a followup and
-        # maybe someone's responded to the initial mail that created an
-        # entry. Try to find the matching nodes with the same title, and
-        # use the _last_ one matched (since that'll _usually_ be the most
-        # recent...). The subject_content_match config may specify an
-        # additional restriction based on the matched node's creation or
-        # activity.
-        tmatch_mode = config['MAILGW_SUBJECT_CONTENT_MATCH']
-        if tmatch_mode != 'never' and nodeid is None and matches['refwd']:
-            l = cl.stringFind(title=title)
-            limit = None
-            if (tmatch_mode.startswith('creation') or
-                    tmatch_mode.startswith('activity')):
-                limit, interval = tmatch_mode.split(' ', 1)
-                threshold = date.Date('.') - date.Interval(interval)
-            for id in l:
-                if limit:
-                    if threshold < cl.get(id, limit):
-                        nodeid = id
-                else:
-                    nodeid = id
+        self.mailer = Mailer(instance.config)
+        self.logger = logging.getLogger('roundup.mailgw')
 
-        # if a nodeid was specified, make sure it's valid
-        if nodeid is not None and not cl.hasnode(nodeid):
-            if pfxmode == 'strict':
-                raise MailUsageError, _("""
-The node specified by the designator in the subject of your message
-("%(nodeid)s") does not exist.
+        # should we trap exceptions (normal usage) or pass them through
+        # (for testing)
+        self.trapExceptions = 1
 
-Subject was: "%(subject)s"
-""") % locals()
+    def do_pipe(self):
+        """ Read a message from standard input and pass it to the mail handler.
+
+            Read into an internal structure that we can seek on (in case
+            there's an error).
+
+            XXX: we may want to read this into a temporary file instead...
+        """
+        s = cStringIO.StringIO()
+        s.write(sys.stdin.read())
+        s.seek(0)
+        self.main(s)
+        return 0
+
+    def do_mailbox(self, filename):
+        """ Read a series of messages from the specified unix mailbox file and
+            pass each to the mail handler.
+        """
+        # open the spool file and lock it
+        import fcntl
+        # FCNTL is deprecated in py2.3 and fcntl takes over all the symbols
+        if hasattr(fcntl, 'LOCK_EX'):
+            FCNTL = fcntl
+        else:
+            import FCNTL
+        f = open(filename, 'r+')
+        fcntl.flock(f.fileno(), FCNTL.LOCK_EX)
+
+        # handle and clear the mailbox
+        try:
+            from mailbox import UnixMailbox
+            mailbox = UnixMailbox(f, factory=Message)
+            # grab one message
+            message = mailbox.next()
+            while message:
+                # handle this message
+                self.handle_Message(message)
+                message = mailbox.next()
+            # nuke the file contents
+            os.ftruncate(f.fileno(), 0)
+        except:
+            import traceback
+            traceback.print_exc()
+            return 1
+        fcntl.flock(f.fileno(), FCNTL.LOCK_UN)
+        return 0
+
+    def do_imap(self, server, user='', password='', mailbox='', ssl=0,
+            cram=0):
+        ''' Do an IMAP connection
+        '''
+        import getpass, imaplib, socket
+        try:
+            if not user:
+                user = raw_input('User: ')
+            if not password:
+                password = getpass.getpass()
+        except (KeyboardInterrupt, EOFError):
+            # Ctrl-C or Ctrl-D; maybe also Ctrl-Z under Windows.
+            print "\nAborted by user."
+            return 1
+        # open a connection to the server and retrieve all messages
+        try:
+            if ssl:
+                self.logger.debug('Trying server %r with ssl'%server)
+                server = imaplib.IMAP4_SSL(server)
             else:
-                title = subject
-                nodeid = None
+                self.logger.debug('Trying server %r without ssl'%server)
+                server = imaplib.IMAP4(server)
+        except (imaplib.IMAP4.error, socket.error, socket.sslerror):
+            self.logger.exception('IMAP server error')
+            return 1
 
-        # Handle the arguments specified by the email gateway command line.
-        # We do this by looping over the list of self.arguments looking for
-        # a -C to tell us what class then the -S setting string.
-        msg_props = {}
-        user_props = {}
-        file_props = {}
-        issue_props = {}
-        # so, if we have any arguments, use them
-        if self.arguments:
-            current_class = 'msg'
-            for option, propstring in self.arguments:
-                if option in ( '-C', '--class'):
-                    current_class = propstring.strip()
-                    # XXX this is not flexible enough.
-                    #   we should chect for subclasses of these classes,
-                    #   not for the class name...
-                    if current_class not in ('msg', 'file', 'user', 'issue'):
-                        mailadmin = config['ADMIN_EMAIL']
-                        raise MailUsageError, _("""
-The mail gateway is not properly set up. Please contact
-%(mailadmin)s and have them fix the incorrect class specified as:
-  %(current_class)s
-""") % locals()
-                if option in ('-S', '--set'):
-                    if current_class == 'issue' :
-                        errors, issue_props = setPropArrayFromString(self,
-                            cl, propstring.strip(), nodeid)
-                    elif current_class == 'file' :
-                        temp_cl = self.db.getclass('file')
-                        errors, file_props = setPropArrayFromString(self,
-                            temp_cl, propstring.strip())
-                    elif current_class == 'msg' :
-                        temp_cl = self.db.getclass('msg')
-                        errors, msg_props = setPropArrayFromString(self,
-                            temp_cl, propstring.strip())
-                    elif current_class == 'user' :
-                        temp_cl = self.db.getclass('user')
-                        errors, user_props = setPropArrayFromString(self,
-                            temp_cl, propstring.strip())
-                    if errors:
-                        mailadmin = config['ADMIN_EMAIL']
-                        raise MailUsageError, _("""
-The mail gateway is not properly set up. Please contact
-%(mailadmin)s and have them fix the incorrect properties:
-  %(errors)s
-""") % locals()
+        try:
+            if cram:
+                server.login_cram_md5(user, password)
+            else:
+                server.login(user, password)
+        except imaplib.IMAP4.error, e:
+            self.logger.exception('IMAP login failure')
+            return 1
 
-        #
-        # handle the users
-        #
-        # Don't create users if anonymous isn't allowed to register
-        create = 1
-        anonid = self.db.user.lookup('anonymous')
-        if not (self.db.security.hasPermission('Create', anonid, 'user')
-                and self.db.security.hasPermission('Email Access', anonid)):
-            create = 0
+        try:
+            if not mailbox:
+                (typ, data) = server.select()
+            else:
+                (typ, data) = server.select(mailbox=mailbox)
+            if typ != 'OK':
+                self.logger.error('Failed to get mailbox %r: %s'%(mailbox,
+                    data))
+                return 1
+            try:
+                numMessages = int(data[0])
+            except ValueError, value:
+                self.logger.error('Invalid message count from mailbox %r'%
+                    data[0])
+                return 1
+            for i in range(1, numMessages+1):
+                (typ, data) = server.fetch(str(i), '(RFC822)')
+
+                # mark the message as deleted.
+                server.store(str(i), '+FLAGS', r'(\Deleted)')
+
+                # process the message
+                s = cStringIO.StringIO(data[0][1])
+                s.seek(0)
+                self.handle_Message(Message(s))
+            server.close()
+        finally:
+            try:
+                server.expunge()
+            except:
+                pass
+            server.logout()
+
+        return 0
+
+
+    def do_apop(self, server, user='', password='', ssl=False):
+        ''' Do authentication POP
+        '''
+        self._do_pop(server, user, password, True, ssl)
+
+    def do_pop(self, server, user='', password='', ssl=False):
+        ''' Do plain POP
+        '''
+        self._do_pop(server, user, password, False, ssl)
+
+    def _do_pop(self, server, user, password, apop, ssl):
+        '''Read a series of messages from the specified POP server.
+        '''
+        import getpass, poplib, socket
+        try:
+            if not user:
+                user = raw_input('User: ')
+            if not password:
+                password = getpass.getpass()
+        except (KeyboardInterrupt, EOFError):
+            # Ctrl-C or Ctrl-D; maybe also Ctrl-Z under Windows.
+            print "\nAborted by user."
+            return 1
+
+        # open a connection to the server and retrieve all messages
+        try:
+            if ssl:
+                klass = poplib.POP3_SSL
+            else:
+                klass = poplib.POP3
+            server = klass(server)
+        except socket.error:
+            self.logger.exception('POP server error')
+            return 1
+        if apop:
+            server.apop(user, password)
+        else:
+            server.user(user)
+            server.pass_(password)
+        numMessages = len(server.list()[1])
+        for i in range(1, numMessages+1):
+            # retr: returns
+            # [ pop response e.g. '+OK 459 octets',
+            #   [ array of message lines ],
+            #   number of octets ]
+            lines = server.retr(i)[1]
+            s = cStringIO.StringIO('\n'.join(lines))
+            s.seek(0)
+            self.handle_Message(Message(s))
+            # delete the message
+            server.dele(i)
+
+        # quit the server to commit changes.
+        server.quit()
+        return 0
 
-        # ok, now figure out who the author is - create a new user if the
-        # "create" flag is true
-        author = uidFromAddress(self.db, from_list[0], create=create)
+    def main(self, fp):
+        ''' fp - the file from which to read the Message.
+        '''
+        return self.handle_Message(Message(fp))
 
-        # if we're not recognised, and we don't get added as a user, then we
-        # must be anonymous
-        if not author:
-            author = anonid
+    def handle_Message(self, message):
+        """Handle an RFC822 Message
 
-        # make sure the author has permission to use the email interface
-        if not self.db.security.hasPermission('Email Access', author):
-            if author == anonid:
-                # we're anonymous and we need to be a registered user
-                from_address = from_list[0][1]
-                registration_info = ""
-                if self.db.security.hasPermission('Web Access', author) and \
-                   self.db.security.hasPermission('Create', anonid, 'user'):
-                    tracker_web = self.instance.config.TRACKER_WEB
-                    registration_info = """ Please register at:
+        Handle the Message object by calling handle_message() and then cope
+        with any errors raised by handle_message.
+        This method's job is to make that call and handle any
+        errors in a sane manner. It should be replaced if you wish to
+        handle errors in a different manner.
+        """
+        # in some rare cases, a particularly stuffed-up e-mail will make
+        # its way into here... try to handle it gracefully
 
-%(tracker_web)suser?template=register
+        sendto = message.getaddrlist('resent-from')
+        if not sendto:
+            sendto = message.getaddrlist('from')
+        if not sendto:
+            # very bad-looking message - we don't even know who sent it
+            msg = ['Badly formed message from mail gateway. Headers:']
+            msg.extend(message.headers)
+            msg = '\n'.join(map(str, msg))
+            self.logger.error(msg)
+            return
 
-...before sending mail to the tracker.""" % locals()
+        msg = 'Handling message'
+        if message.getheader('message-id'):
+            msg += ' (Message-id=%r)'%message.getheader('message-id')
+        self.logger.info(msg)
 
-                raise Unauthorized, _("""
-You are not a registered user.%(registration_info)s
+        # try normal message-handling
+        if not self.trapExceptions:
+            return self.handle_message(message)
 
-Unknown address: %(from_address)s
-""") % locals()
-            else:
-                # we're registered and we're _still_ not allowed access
-                raise Unauthorized, _(
-                    'You are not permitted to access this tracker.')
+        # no, we want to trap exceptions
+        try:
+            return self.handle_message(message)
+        except MailUsageHelp:
+            # bounce the message back to the sender with the usage message
+            fulldoc = '\n'.join(string.split(__doc__, '\n')[2:])
+            m = ['']
+            m.append('\n\nMail Gateway Help\n=================')
+            m.append(fulldoc)
+            self.mailer.bounce_message(message, [sendto[0][1]], m,
+                subject="Mail Gateway Help")
+        except MailUsageError, value:
+            # bounce the message back to the sender with the usage message
+            fulldoc = '\n'.join(string.split(__doc__, '\n')[2:])
+            m = ['']
+            m.append(str(value))
+            m.append('\n\nMail Gateway Help\n=================')
+            m.append(fulldoc)
+            self.mailer.bounce_message(message, [sendto[0][1]], m)
+        except Unauthorized, value:
+            # just inform the user that he is not authorized
+            m = ['']
+            m.append(str(value))
+            self.mailer.bounce_message(message, [sendto[0][1]], m)
+        except IgnoreMessage:
+            # do not take any action
+            # this exception is thrown when email should be ignored
+            msg = 'IgnoreMessage raised'
+            if message.getheader('message-id'):
+                msg += ' (Message-id=%r)'%message.getheader('message-id')
+            self.logger.info(msg)
+            return
+        except:
+            msg = 'Exception handling message'
+            if message.getheader('message-id'):
+                msg += ' (Message-id=%r)'%message.getheader('message-id')
+            self.logger.exception(msg)
 
-        # make sure they're allowed to edit or create this class of information
-        if nodeid:
-            if not self.db.security.hasPermission('Edit', author, classname,
-                    itemid=nodeid):
-                raise Unauthorized, _(
-                    'You are not permitted to edit %(classname)s.') % locals()
-        else:
-            if not self.db.security.hasPermission('Create', author, classname):
-                raise Unauthorized, _(
-                    'You are not permitted to create %(classname)s.'
-                    ) % locals()
+            # bounce the message back to the sender with the error message
+            # let the admin know that something very bad is happening
+            m = ['']
+            m.append('An unexpected error occurred during the processing')
+            m.append('of your message. The tracker administrator is being')
+            m.append('notified.\n')
+            self.mailer.bounce_message(message, [sendto[0][1]], m)
 
-        # the author may have been created - make sure the change is
-        # committed before we reopen the database
-        self.db.commit()
+            m.append('----------------')
+            m.append(traceback.format_exc())
+            self.mailer.bounce_message(message, [self.instance.config.ADMIN_EMAIL], m)
 
-        # set the database user as the author
-        username = self.db.user.get(author, 'username')
-        self.db.setCurrentUser(username)
+    def handle_message(self, message):
+        ''' message - a Message instance
 
-        # re-get the class with the new database connection
-        cl = self.db.getclass(classname)
+        Parse the message as per the module docstring.
+        '''
+        # get database handle for handling one email
+        self.db = self.instance.open ('admin')
+        try:
+            return self._handle_message(message)
+        finally:
+            self.db.close()
 
-        # now update the recipients list
-        recipients = []
-        tracker_email = config['TRACKER_EMAIL'].lower()
-        for recipient in message.getaddrlist('to') + message.getaddrlist('cc'):
-            r = recipient[1].strip().lower()
-            if r == tracker_email or not r:
-                continue
+    def _handle_message(self, message):
+        ''' message - a Message instance
 
-            # look up the recipient - create if necessary (and we're
-            # allowed to)
-            recipient = uidFromAddress(self.db, recipient, create, **user_props)
+        Parse the message as per the module docstring.
+        The following code expects an opened database and a try/finally
+        that closes the database.
+        '''
+        parsed_message = self.parsed_message_class(self, message)
 
-            # if all's well, add the recipient to the list
-            if recipient:
-                recipients.append(recipient)
+        # Filter out messages to ignore
+        parsed_message.handle_ignore()
+        
+        # Check for usage/help requests
+        parsed_message.handle_help()
+        
+        # Check if the subject line is valid
+        parsed_message.check_subject()
 
-        #
-        # handle the subject argument list
-        #
-        # figure what the properties of this Class are
-        properties = cl.getprops()
-        props = {}
-        args = matches['args']
-        argswhole = matches['argswhole']
-        if args:
-            if sfxmode == 'none':
-                title += ' ' + argswhole
-            else:
-                errors, props = setPropArrayFromString(self, cl, args, nodeid)
-                # handle any errors parsing the argument list
-                if errors:
-                    if sfxmode == 'strict':
-                        errors = '\n- '.join(map(str, errors))
-                        raise MailUsageError, _("""
-There were problems handling your subject line argument list:
-- %(errors)s
+        # XXX Don't enable. This doesn't work yet.
+        # XXX once this works it should be moved to parsedMessage class
+#  "[^A-z.]tracker\+(?P<classname>[^\d\s]+)(?P<nodeid>\d+)\@some.dom.ain[^A-z.]"
+        # handle delivery to addresses like:tracker+issue25 at some.dom.ain
+        # use the embedded issue number as our issue
+#            issue_re = config['MAILGW_ISSUE_ADDRESS_RE']
+#            if issue_re:
+#                for header in ['to', 'cc', 'bcc']:
+#                    addresses = message.getheader(header, '')
+#                if addresses:
+#                  # FIXME, this only finds the first match in the addresses.
+#                    issue = re.search(issue_re, addresses, 'i')
+#                    if issue:
+#                        classname = issue.group('classname')
+#                        nodeid = issue.group('nodeid')
+#                        break
 
-Subject was: "%(subject)s"
-""") % locals()
-                    else:
-                        title += ' ' + argswhole
+        # Parse the subject line to get the important parts
+        parsed_message.parse_subject()
 
+        # check for registration OTK
+        if parsed_message.rego_confirm():
+            return
 
-        # set the issue title to the subject
-        title = title.strip()
-        if (title and properties.has_key('title') and not
-                issue_props.has_key('title')):
-            issue_props['title'] = title
+        # get the classname
+        parsed_message.get_classname()
 
-        #
-        # handle message-id and in-reply-to
-        #
-        messageid = message.getheader('message-id')
-        # generate a messageid if there isn't one
-        if not messageid:
-            messageid = "<%s.%s.%s%s@%s>"%(time.time(), random.random(),
-                classname, nodeid, config['MAIL_DOMAIN'])
+        # get the optional nodeid
+        parsed_message.get_nodeid()
 
-        # if they've enabled PGP processing then verify the signature
-        # or decrypt the message
+        # Determine who the author is
+        parsed_message.get_author_id()
+        
+        # make sure they're allowed to edit or create this class
+        parsed_message.check_node_permissions()
 
-        # if PGP_ROLES is specified the user must have a Role in the list
-        # or we will skip PGP processing
-        def pgp_role():
-            if self.instance.config.PGP_ROLES:
-                return user_has_role(self.db, author,
-                    self.instance.config.PGP_ROLES)
-            else:
-                return True
+        # author may have been created:
+        # commit author to database and re-open as author
+        parsed_message.commit_and_reopen_as_author()
 
-        if self.instance.config.PGP_ENABLE and pgp_role():
-            assert pyme, 'pyme is not installed'
-            # signed/encrypted mail must come from the primary address
-            author_address = self.db.user.get(author, 'address')
-            if self.instance.config.PGP_HOMEDIR:
-                os.environ['GNUPGHOME'] = self.instance.config.PGP_HOMEDIR
-            if message.pgp_signed():
-                message.verify_signature(author_address)
-            elif message.pgp_encrypted():
-                # replace message with the contents of the decrypted
-                # message for content extraction
-                # TODO: encrypted message handling is far from perfect
-                # bounces probably include the decrypted message, for
-                # instance :(
-                message = message.decrypt(author_address)
-            else:
-                raise MailUsageError, _("""
-This tracker has been configured to require all email be PGP signed or
-encrypted.""")
-        # now handle the body - find the message
-        ig = self.instance.config.MAILGW_IGNORE_ALTERNATIVES
-        content, attachments = message.extract_content(ignore_alternatives = ig)
-        if content is None:
-            raise MailUsageError, _("""
-Roundup requires the submission to be plain text. The message parser could
-not find a text/plain part to use.
-""")
+        # Get the recipients list
+        parsed_message.get_recipients()
 
-        # parse the body of the message, stripping out bits as appropriate
-        summary, content = parseContent(content, config=config)
-        content = content.strip()
+        # get the new/updated node props
+        parsed_message.get_props()
 
-        #
-        # handle the attachments
-        #
-        if properties.has_key('files'):
-            files = []
-            for (name, mime_type, data) in attachments:
-                if not self.db.security.hasPermission('Create', author, 'file'):
-                    raise Unauthorized, _(
-                        'You are not permitted to create files.')
-                if not name:
-                    name = "unnamed"
-                try:
-                    fileid = self.db.file.create(type=mime_type, name=name,
-                         content=data, **file_props)
-                except exceptions.Reject:
-                    pass
-                else:
-                    files.append(fileid)
-            # attach the files to the issue
-            if not self.db.security.hasPermission('Edit', author,
-                    classname, 'files'):
-                raise Unauthorized, _(
-                    'You are not permitted to add files to %(classname)s.'
-                    ) % locals()
+        # Handle PGP signed or encrypted messages
+        parsed_message.get_pgp_message()
 
-            if nodeid:
-                # extend the existing files list
-                fileprop = cl.get(nodeid, 'files')
-                fileprop.extend(files)
-                props['files'] = fileprop
-            else:
-                # pre-load the files list
-                props['files'] = files
+        # extract content and attachments from message body
+        parsed_message.get_content_and_attachments()
 
-        #
+        # put attachments into files linked to the issue
+        parsed_message.create_files()
+        
         # create the message if there's a message body (content)
-        #
-        if (content and properties.has_key('messages')):
-            if not self.db.security.hasPermission('Create', author, 'msg'):
-                raise Unauthorized, _(
-                    'You are not permitted to create messages.')
+        parsed_message.create_msg()
+            
+        # perform the node change / create
+        nodeid = parsed_message.create_node()
 
-            try:
-                message_id = self.db.msg.create(author=author,
-                    recipients=recipients, date=date.Date('.'),
-                    summary=summary, content=content, files=files,
-                    messageid=messageid, inreplyto=inreplyto, **msg_props)
-            except exceptions.Reject, error:
-                raise MailUsageError, _("""
-Mail message was rejected by a detector.
-%(error)s
-""") % locals()
-            # attach the message to the node
-            if not self.db.security.hasPermission('Edit', author,
-                    classname, 'messages'):
-                raise Unauthorized, _(
-                    'You are not permitted to add messages to %(classname)s.'
-                    ) % locals()
+        # commit the changes to the DB
+        self.db.commit()
 
-            if nodeid:
-                # add the message to the node's list
-                messages = cl.get(nodeid, 'messages')
-                messages.append(message_id)
-                props['messages'] = messages
-            else:
-                # pre-load the messages list
-                props['messages'] = [message_id]
+        return nodeid
 
-        #
-        # perform the node change / create
-        #
-        try:
-            # merge the command line props defined in issue_props into
-            # the props dictionary because function(**props, **issue_props)
-            # is a syntax error.
-            for prop in issue_props.keys() :
-                if not props.has_key(prop) :
-                    props[prop] = issue_props[prop]
-
-            # Check permissions for each property
-            for prop in props.keys():
-                if not self.db.security.hasPermission('Edit', author,
-                        classname, prop):
-                    raise Unauthorized, _('You are not permitted to edit '
-                        'property %(prop)s of class %(classname)s.') % locals()
+    def get_class_arguments(self, class_type, classname=None):
+        ''' class_type - a valid node class type:
+                - 'user' refers to the author of a message
+                - 'issue' refers to an issue-type class (to which the
+                  message is appended) specified in parameter classname
+                  Note that this need not be the real classname, we get
+                  the real classname used as a parameter (from previous
+                  message-parsing steps)
+                - 'file' specifies a file-type class
+                - 'msg' is the message-class
+            classname - the name of the current issue-type class
+
+        Parse the commandline arguments and retrieve the properties that
+        are relevant to the class_type. We now allow multiple -S options
+        per class_type (-C option).
+        '''
+        allprops = {}
 
-            if nodeid:
-                cl.set(nodeid, **props)
-            else:
-                nodeid = cl.create(**props)
-        except (TypeError, IndexError, ValueError, exceptions.Reject), message:
+        classname = classname or class_type
+        cls_lookup = { 'issue' : classname }
+        
+        # Allow other issue-type classes -- take the real classname from
+        # previous parsing-steps of the message:
+        clsname = cls_lookup.get (class_type, class_type)
+
+        # check if the clsname is valid
+        try:
+            self.db.getclass(clsname)
+        except KeyError:
+            mailadmin = self.instance.config['ADMIN_EMAIL']
             raise MailUsageError, _("""
-There was a problem with the message you sent:
-   %(message)s
+The mail gateway is not properly set up. Please contact
+%(mailadmin)s and have them fix the incorrect class specified as:
+  %(clsname)s
 """) % locals()
+        
+        if self.arguments:
+            # The default type on the commandline is msg
+            if class_type == 'msg':
+                current_type = class_type
+            else:
+                current_type = None
+            
+            # Handle the arguments specified by the email gateway command line.
+            # We do this by looping over the list of self.arguments looking for
+            # a -C to match the class we want, then use the -S setting string.
+            for option, propstring in self.arguments:
+                if option in ( '-C', '--class'):
+                    current_type = propstring.strip()
+                    
+                    if current_type != class_type:
+                        current_type = None
+
+                elif current_type and option in ('-S', '--set'):
+                    cls = cls_lookup.get (current_type, current_type)
+                    temp_cl = self.db.getclass(cls)
+                    errors, props = setPropArrayFromString(self,
+                        temp_cl, propstring.strip())
 
-        # commit the changes to the DB
-        self.db.commit()
+                    if errors:
+                        mailadmin = self.instance.config['ADMIN_EMAIL']
+                        raise MailUsageError, _("""
+The mail gateway is not properly set up. Please contact
+%(mailadmin)s and have them fix the incorrect properties:
+  %(errors)s
+""") % locals()
+                    allprops.update(props)
 
-        return nodeid
+        return allprops
 
 
 def setPropArrayFromString(self, cl, propString, nodeid=None):
@@ -1484,7 +1696,7 @@
         try:
             return db.user.create(username=trying, address=address,
                 realname=realname, roles=db.config.NEW_EMAIL_USER_ROLES,
-                password=password.Password(password.generatePassword()),
+                password=password.Password(password.generatePassword(), config=db.config),
                 **user_props)
         except exceptions.Reject:
             return 0

Modified: tracker/roundup-src/roundup/password.py
==============================================================================
--- tracker/roundup-src/roundup/password.py	(original)
+++ tracker/roundup-src/roundup/password.py	Thu Aug  4 15:46:52 2011
@@ -22,22 +22,135 @@
 __docformat__ = 'restructuredtext'
 
 import re, string, random
-from roundup.anypy.hashlib_ import md5, sha1
+from base64 import b64encode, b64decode
+from roundup.anypy.hashlib_ import md5, sha1, shamodule
 try:
     import crypt
 except ImportError:
     crypt = None
 
+_bempty = ""
+_bjoin = _bempty.join
+
+def getrandbytes(count):
+    return _bjoin(chr(random.randint(0,255)) for i in xrange(count))
+
+#NOTE: PBKDF2 hash is using this variant of base64 to minimize encoding size,
+#      and have charset that's compatible w/ unix crypt variants
+def h64encode(data):
+    """encode using variant of base64"""
+    return b64encode(data, "./").strip("=\n")
+
+def h64decode(data):
+    """decode using variant of base64"""
+    off = len(data) % 4
+    if off == 0:
+        return b64decode(data, "./")
+    elif off == 1:
+        raise ValueError("invalid base64 input")
+    elif off == 2:
+        return b64decode(data + "==", "./")
+    else:
+        return b64decode(data + "=", "./")
+
+try:
+    from M2Crypto.EVP import pbkdf2 as _pbkdf2
+except ImportError:
+    #no m2crypto - make our own pbkdf2 function
+    from struct import pack
+    from hmac import HMAC
+
+    def xor_bytes(left, right):
+        "perform bitwise-xor of two byte-strings"
+        return _bjoin(chr(ord(l) ^ ord(r)) for l, r in zip(left, right))
+
+    def _pbkdf2(password, salt, rounds, keylen):
+        digest_size = 20 # sha1 generates 20-byte blocks
+        total_blocks = int((keylen+digest_size-1)/digest_size)
+        hmac_template = HMAC(password, None, shamodule)
+        out = _bempty
+        for i in xrange(1, total_blocks+1):
+            hmac = hmac_template.copy()
+            hmac.update(salt + pack(">L",i))
+            block = tmp = hmac.digest()
+            for j in xrange(rounds-1):
+                hmac = hmac_template.copy()
+                hmac.update(tmp)
+                tmp = hmac.digest()
+                #TODO: need to speed up this call
+                block = xor_bytes(block, tmp)
+            out += block
+        return out[:keylen]
+
+def pbkdf2(password, salt, rounds, keylen):
+    """pkcs#5 password-based key derivation v2.0
+
+    :arg password: passphrase to use to generate key (if unicode, converted to utf-8)
+    :arg salt: salt string to use when generating key (if unicode, converted to utf-8)
+    :param rounds: number of rounds to use to generate key
+    :arg keylen: number of bytes to generate
+
+    If M2Crypto is present, uses its implementation as backend.
+
+    :returns:
+        raw bytes of generated key
+    """
+    if isinstance(password, unicode):
+        password = password.encode("utf-8")
+    if isinstance(salt, unicode):
+        salt = salt.encode("utf-8")
+    if keylen > 40:
+        #NOTE: pbkdf2 allows up to (2**31-1)*20 bytes,
+        # but m2crypto has issues on some platforms above 40,
+        # and such sizes aren't needed for a password hash anyways...
+        raise ValueError, "key length too large"
+    if rounds < 1:
+        raise ValueError, "rounds must be positive number"
+    return _pbkdf2(password, salt, rounds, keylen)
+
 class PasswordValueError(ValueError):
     """ The password value is not valid """
     pass
 
-def encodePassword(plaintext, scheme, other=None):
+def pbkdf2_unpack(pbkdf2):
+    """ unpack pbkdf2 encrypted password into parts,
+        assume it has format "{rounds}${salt}${digest}
+    """
+    if isinstance(pbkdf2, unicode):
+        pbkdf2 = pbkdf2.encode("ascii")
+    try:
+        rounds, salt, digest = pbkdf2.split("$")
+    except ValueError:
+        raise PasswordValueError, "invalid PBKDF2 hash (wrong number of separators)"
+    if rounds.startswith("0"):
+        raise PasswordValueError, "invalid PBKDF2 hash (zero-padded rounds)"
+    try:
+        rounds = int(rounds)
+    except ValueError:
+        raise PasswordValueError, "invalid PBKDF2 hash (invalid rounds)"
+    raw_salt = h64decode(salt)
+    return rounds, salt, raw_salt, digest
+
+def encodePassword(plaintext, scheme, other=None, config=None):
     """Encrypt the plaintext password.
     """
     if plaintext is None:
         plaintext = ""
-    if scheme == 'SHA':
+    if scheme == "PBKDF2":
+        if other:
+            rounds, salt, raw_salt, digest = pbkdf2_unpack(other)
+        else:
+            raw_salt = getrandbytes(20)
+            salt = h64encode(raw_salt)
+            if config:
+                rounds = config.PASSWORD_PBKDF2_DEFAULT_ROUNDS
+            else:
+                rounds = 10000
+        if rounds < 1000:
+            raise PasswordValueError, "invalid PBKDF2 hash (rounds too low)"
+        raw_digest = pbkdf2(plaintext, raw_salt, rounds, 20)
+        return "%d$%s$%s" % (rounds, salt, h64encode(raw_digest))
+    elif scheme == 'SHA':
         s = sha1(plaintext).hexdigest()
     elif scheme == 'MD5':
         s = md5(plaintext).hexdigest()
@@ -58,7 +171,49 @@
     chars = string.letters+string.digits
     return ''.join([random.choice(chars) for x in range(length)])
 
-class Password:
+class JournalPassword:
+    """ Password dummy instance intended for journal operation.
+        We do not store passwords in the journal any longer.  The dummy
+        version only reads the encryption scheme from the given
+        encrypted password.
+    """
+    default_scheme = 'PBKDF2'        # new encryptions use this scheme
+    pwre = re.compile(r'{(\w+)}(.+)')
+
+    def __init__ (self, encrypted=''):
+        if isinstance(encrypted, self.__class__):
+            self.scheme = encrypted.scheme or self.default_scheme
+        else:
+            m = self.pwre.match(encrypted)
+            if m:
+                self.scheme = m.group(1)
+            else:
+                self.scheme = self.default_scheme
+        self.password = ''
+
+    def dummystr(self):
+        """ return dummy string to store in journal
+            - reports scheme, but nothing else
+        """
+        return "{%s}*encrypted*" % (self.scheme,)
+
+    __str__ = dummystr
+
+    def __cmp__(self, other):
+        """Compare this password against another password."""
+        # check to see if we're comparing instances
+        if isinstance(other, self.__class__):
+            if self.scheme != other.scheme:
+                return cmp(self.scheme, other.scheme)
+            return cmp(self.password, other.password)
+
+        # assume password is plaintext
+        if self.password is None:
+            raise ValueError, 'Password not set'
+        return cmp(self.password, encodePassword(other, self.scheme,
+            self.password or None))
+
+class Password(JournalPassword):
     """The class encapsulates a Password property type value in the database.
 
     The encoding of the password is one if None, 'SHA', 'MD5' or 'plaintext'.
@@ -80,24 +235,36 @@
     >>> 'not sekrit' != p
     1
     """
+    #TODO: code to migrate from old password schemes.
 
-    default_scheme = 'SHA'        # new encryptions use this scheme
-    pwre = re.compile(r'{(\w+)}(.+)')
+    deprecated_schemes = ["SHA", "MD5", "crypt", "plaintext"]
+    known_schemes = ["PBKDF2"] + deprecated_schemes
 
-    def __init__(self, plaintext=None, scheme=None, encrypted=None):
+    def __init__(self, plaintext=None, scheme=None, encrypted=None, strict=False, config=None):
         """Call setPassword if plaintext is not None."""
         if scheme is None:
             scheme = self.default_scheme
         if plaintext is not None:
-            self.setPassword (plaintext, scheme)
+            self.setPassword (plaintext, scheme, config=config)
         elif encrypted is not None:
-            self.unpack(encrypted, scheme)
+            self.unpack(encrypted, scheme, strict=strict, config=config)
         else:
             self.scheme = self.default_scheme
             self.password = None
             self.plaintext = None
 
-    def unpack(self, encrypted, scheme=None):
+    def needs_migration(self):
+        """ Password has insecure scheme or other insecure parameters
+            and needs migration to new password scheme
+        """
+        if self.scheme in self.deprecated_schemes:
+            return True
+        rounds, salt, raw_salt, digest = pbkdf2_unpack(self.password)
+        if rounds < 1000:
+            return True
+        return False
+
+    def unpack(self, encrypted, scheme=None, strict=False, config=None):
         """Set the password info from the scheme:<encryted info> string
            (the inverse of __str__)
         """
@@ -108,30 +275,18 @@
             self.plaintext = None
         else:
             # currently plaintext - encrypt
-            self.setPassword(encrypted, scheme)
+            self.setPassword(encrypted, scheme, config=config)
+        if strict and self.scheme not in self.known_schemes:
+            raise PasswordValueError, "unknown encryption scheme: %r" % (self.scheme,)
 
-    def setPassword(self, plaintext, scheme=None):
+    def setPassword(self, plaintext, scheme=None, config=None):
         """Sets encrypts plaintext."""
         if scheme is None:
             scheme = self.default_scheme
         self.scheme = scheme
-        self.password = encodePassword(plaintext, scheme)
+        self.password = encodePassword(plaintext, scheme, config=config)
         self.plaintext = plaintext
 
-    def __cmp__(self, other):
-        """Compare this password against another password."""
-        # check to see if we're comparing instances
-        if isinstance(other, Password):
-            if self.scheme != other.scheme:
-                return cmp(self.scheme, other.scheme)
-            return cmp(self.password, other.password)
-
-        # assume password is plaintext
-        if self.password is None:
-            raise ValueError, 'Password not set'
-        return cmp(self.password, encodePassword(other, self.scheme,
-            self.password))
-
     def __str__(self):
         """Stringify the encrypted password for database storage."""
         if self.password is None:
@@ -158,6 +313,22 @@
     assert p == 'sekrit'
     assert p != 'not sekrit'
     assert 'sekrit' == p
+    assert 'not sekrit' != p
+
+    # PBKDF2 - low level function
+    from binascii import unhexlify
+    k = pbkdf2("password", "ATHENA.MIT.EDUraeburn", 1200, 32)
+    assert k == unhexlify("5c08eb61fdf71e4e4ec3cf6ba1f5512ba7e52ddbc5e5142f708a31e2e62b1e13")
+
+    # PBKDF2 - hash function
+    h = "5000$7BvbBq.EZzz/O0HuwX3iP.nAG3s$g3oPnFFaga2BJaX5PoPRljl4XIE"
+    assert encodePassword("sekrit", "PBKDF2", h) == h
+
+    # PBKDF2 - high level integration
+    p = Password('sekrit', 'PBKDF2')
+    assert p == 'sekrit'
+    assert p != 'not sekrit'
+    assert 'sekrit' == p
     assert 'not sekrit' != p
 
 if __name__ == '__main__':

Modified: tracker/roundup-src/roundup/roundupdb.py
==============================================================================
--- tracker/roundup-src/roundup/roundupdb.py	(original)
+++ tracker/roundup-src/roundup/roundupdb.py	Thu Aug  4 15:46:52 2011
@@ -31,11 +31,14 @@
 from email.MIMEText import MIMEText
 from email.MIMEBase import MIMEBase
 
+from anypy.email_ import FeedParser
+
 from roundup import password, date, hyperdb
 from roundup.i18n import _
 
 # MessageSendError is imported for backwards compatibility
-from roundup.mailer import Mailer, MessageSendError, encode_quopri
+from roundup.mailer import Mailer, MessageSendError, encode_quopri, \
+    nice_sender_header
 
 class Database:
 
@@ -100,8 +103,7 @@
             elif isinstance(proptype, hyperdb.Interval):
                 props[propname] = date.Interval(value)
             elif isinstance(proptype, hyperdb.Password):
-                props[propname] = password.Password()
-                props[propname].unpack(value)
+                props[propname] = password.Password(encrypted=value)
 
         # tag new user creation with 'admin'
         self.journaltag = 'admin'
@@ -136,7 +138,7 @@
         # Because getting a logger requires acquiring a lock, we want
         # to do it only once.
         if not hasattr(self, '__logger'):
-            self.__logger = logging.getLogger('hyperdb')
+            self.__logger = logging.getLogger('roundup.hyperdb')
 
         return self.__logger
 
@@ -178,7 +180,7 @@
     )
 
     # New methods:
-    def addmessage(self, nodeid, summary, text):
+    def addmessage(self, issueid, summary, text):
         """Add a message to an issue's mail spool.
 
         A new "msg" node is constructed using the current date, the user that
@@ -191,8 +193,8 @@
         appended to the "messages" field of the specified issue.
         """
 
-    def nosymessage(self, nodeid, msgid, oldvalues, whichnosy='nosy',
-            from_address=None, cc=[], bcc=[]):
+    def nosymessage(self, issueid, msgid, oldvalues, whichnosy='nosy',
+            from_address=None, cc=[], bcc=[], cc_emails = [], bcc_emails = []):
         """Send a message to the members of an issue's nosy list.
 
         The message is sent only to users on the nosy list who are not
@@ -211,6 +213,12 @@
         message to that may not be specified in the message's recipients
         list. These recipients will not be included in the To: or Cc:
         address lists.
+
+        The cc_emails and bcc_emails arguments take a list of additional
+        recipient email addresses (just the mail address, not roundup users);
+        this can be useful for sending to additional email addresses which are
+        not roundup users. These arguments are currently not used by roundup's
+        nosyreaction but can be used by customized (nosy-)reactors.
         """
         if msgid:
             authid = self.db.msg.get(msgid, 'author')
@@ -227,18 +235,29 @@
             seen_message[recipient] = 1
 
         def add_recipient(userid, to):
-            # make sure they have an address
+            """ make sure they have an address """
             address = self.db.user.get(userid, 'address')
             if address:
                 to.append(address)
                 recipients.append(userid)
 
         def good_recipient(userid):
-            # Make sure we don't send mail to either the anonymous
-            # user or a user who has already seen the message.
+            """ Make sure we don't send mail to either the anonymous
+                user or a user who has already seen the message.
+                Also check permissions on the message if not a system
+                message: A user must have view permission on content and
+                files to be on the receiver list. We do *not* check the
+                author etc. for now.
+            """
+            allowed = True
+            if msgid:
+                for prop in 'content', 'files':
+                    if prop in self.db.msg.properties:
+                        allowed = allowed and self.db.security.hasPermission(
+                            'View', userid, 'msg', prop, msgid)
             return (userid and
                     (self.db.user.get(userid, 'username') != 'anonymous') and
-                    not seen_message.has_key(userid))
+                    allowed and not seen_message.has_key(userid))
 
         # possibly send the message to the author, as long as they aren't
         # anonymous
@@ -251,34 +270,36 @@
             seen_message[authid] = 1
 
         # now deal with the nosy and cc people who weren't recipients.
-        for userid in cc + self.get(nodeid, whichnosy):
+        for userid in cc + self.get(issueid, whichnosy):
             if good_recipient(userid):
                 add_recipient(userid, sendto)
+        sendto.extend (cc_emails)
 
         # now deal with bcc people.
         for userid in bcc:
             if good_recipient(userid):
                 add_recipient(userid, bcc_sendto)
+        bcc_sendto.extend (bcc_emails)
 
         if oldvalues:
-            note = self.generateChangeNote(nodeid, oldvalues)
+            note = self.generateChangeNote(issueid, oldvalues)
         else:
-            note = self.generateCreateNote(nodeid)
+            note = self.generateCreateNote(issueid)
 
         # If we have new recipients, update the message's recipients
         # and send the mail.
         if sendto or bcc_sendto:
             if msgid is not None:
                 self.db.msg.set(msgid, recipients=recipients)
-            self.send_message(nodeid, msgid, note, sendto, from_address,
+            self.send_message(issueid, msgid, note, sendto, from_address,
                 bcc_sendto)
 
     # backwards compatibility - don't remove
     sendmessage = nosymessage
 
-    def send_message(self, nodeid, msgid, note, sendto, from_address=None,
+    def send_message(self, issueid, msgid, note, sendto, from_address=None,
             bcc_sendto=[], authid=None):
-        '''Actually send the nominated message from this node to the sendto
+        '''Actually send the nominated message from this issue to the sendto
            recipients, with the note appended.
         '''
         users = self.db.user
@@ -297,14 +318,14 @@
             # this is an old message that didn't get a messageid, so
             # create one
             messageid = "<%s.%s.%s%s@%s>"%(time.time(), random.random(),
-                                           self.classname, nodeid,
+                                           self.classname, issueid,
                                            self.db.config.MAIL_DOMAIN)
             if msgid is not None:
                 messages.set(msgid, messageid=messageid)
 
         # compose title
         cn = self.classname
-        title = self.get(nodeid, 'title') or '%s message copy'%cn
+        title = self.get(issueid, 'title') or '%s message copy'%cn
 
         # figure author information
         if authid:
@@ -331,11 +352,11 @@
 
         # put in roundup's signature
         if self.db.config.EMAIL_SIGNATURE_POSITION == 'top':
-            m.append(self.email_signature(nodeid, msgid))
+            m.append(self.email_signature(issueid, msgid))
 
         # add author information
         if authid and self.db.config.MAIL_ADD_AUTHORINFO:
-            if msgid and len(self.get(nodeid, 'messages')) == 1:
+            if msgid and len(self.get(issueid, 'messages')) == 1:
                 m.append(_("New submission from %(authname)s%(authaddr)s:")
                     % locals())
             elif msgid:
@@ -355,8 +376,7 @@
         if msgid :
             for fileid in messages.get(msgid, 'files') :
                 # check the attachment size
-                filename = self.db.filename('file', fileid, None)
-                filesize = os.path.getsize(filename)
+                filesize = self.db.filesize('file', fileid, None)
                 if filesize <= self.db.config.NOSY_MAX_ATTACHMENT_SIZE:
                     message_files.append(fileid)
                 else:
@@ -372,7 +392,7 @@
 
         # put in roundup's signature
         if self.db.config.EMAIL_SIGNATURE_POSITION == 'bottom':
-            m.append(self.email_signature(nodeid, msgid))
+            m.append(self.email_signature(issueid, msgid))
 
         # figure the encoding
         charset = getattr(self.db.config, 'EMAIL_CHARSET', 'utf-8')
@@ -392,7 +412,7 @@
         if from_tag:
             from_tag = ' ' + from_tag
 
-        subject = '[%s%s] %s'%(cn, nodeid, title)
+        subject = '[%s%s] %s'%(cn, issueid, title)
         author = (authname + from_tag, from_address)
 
         # send an individual message per recipient?
@@ -401,9 +421,10 @@
         else:
             sendto = [sendto]
 
+        # tracker sender info
         tracker_name = unicode(self.db.config.TRACKER_NAME, 'utf-8')
-        tracker_name = formataddr((tracker_name, from_address))
-        tracker_name = Header(tracker_name, charset)
+        tracker_name = nice_sender_header(tracker_name, from_address,
+            charset)
 
         # now send one or more messages
         # TODO: I believe we have to create a new message each time as we
@@ -435,12 +456,12 @@
                 if not 'name' in cl.getprops():
                     continue
                 if isinstance(prop, hyperdb.Link):
-                    value = self.get(nodeid, propname)
+                    value = self.get(issueid, propname)
                     if value is None:
                         continue
                     values = [value]
                 else:
-                    values = self.get(nodeid, propname)
+                    values = self.get(issueid, propname)
                     if not values:
                         continue
                 values = [cl.get(v, 'name') for v in values]
@@ -453,11 +474,11 @@
 
             if not inreplyto:
                 # Default the reply to the first message
-                msgs = self.get(nodeid, 'messages')
+                msgs = self.get(issueid, 'messages')
                 # Assume messages are sorted by increasing message number here
                 # If the issue is just being created, and the submitter didn't
                 # provide a message, then msgs will be empty.
-                if msgs and msgs[0] != nodeid:
+                if msgs and msgs[0] != msgid:
                     inreplyto = messages.get(msgs[0], 'messageid')
                     if inreplyto:
                         message['In-Reply-To'] = inreplyto
@@ -466,6 +487,7 @@
             if message_files:
                 # first up the text as a part
                 part = MIMEText(body)
+                part.set_charset(charset)
                 encode_quopri(part)
                 message.attach(part)
 
@@ -485,6 +507,12 @@
                         else:
                             part = MIMEText(content)
                             part['Content-Transfer-Encoding'] = '7bit'
+                    elif mime_type == 'message/rfc822':
+                        main, sub = mime_type.split('/')
+                        p = FeedParser()
+                        p.feed(content)
+                        part = MIMEBase(main, sub)
+                        part.set_payload([p.close()])
                     else:
                         # some other type, so encode it
                         if not mime_type:
@@ -496,7 +524,8 @@
                         part = MIMEBase(main, sub)
                         part.set_payload(content)
                         Encoders.encode_base64(part)
-                    part['Content-Disposition'] = 'attachment;\n filename="%s"'%name
+                    cd = 'Content-Disposition'
+                    part[cd] = 'attachment;\n filename="%s"'%name
                     message.attach(part)
 
             else:
@@ -509,7 +538,7 @@
                 mailer.smtp_send(sendto, message.as_string())
             first = False
 
-    def email_signature(self, nodeid, msgid):
+    def email_signature(self, issueid, msgid):
         ''' Add a signature to the e-mail with some useful information
         '''
         # simplistic check to see if the url is valid,
@@ -522,7 +551,7 @@
         else:
             if not base.endswith('/'):
                 base = base + '/'
-            web = base + self.classname + nodeid
+            web = base + self.classname + issueid
 
         # ensure the email address is properly quoted
         email = formataddr((self.db.config.TRACKER_NAME,
@@ -532,7 +561,7 @@
         return '\n%s\n%s\n<%s>\n%s'%(line, email, web, line)
 
 
-    def generateCreateNote(self, nodeid):
+    def generateCreateNote(self, issueid):
         """Generate a create note that lists initial property values
         """
         cn = self.classname
@@ -544,7 +573,7 @@
         prop_items = props.items()
         prop_items.sort()
         for propname, prop in prop_items:
-            value = cl.get(nodeid, propname, None)
+            value = cl.get(issueid, propname, None)
             # skip boring entries
             if not value:
                 continue
@@ -574,7 +603,7 @@
         m.insert(0, '')
         return '\n'.join(m)
 
-    def generateChangeNote(self, nodeid, oldvalues):
+    def generateChangeNote(self, issueid, oldvalues):
         """Generate a change note that lists property changes
         """
         if not isinstance(oldvalues, type({})):
@@ -595,7 +624,7 @@
             # not all keys from oldvalues might be available in database
             # this happens when property was deleted
             try:
-                new_value = cl.get(nodeid, key)
+                new_value = cl.get(issueid, key)
             except KeyError:
                 continue
             # the old value might be non existent
@@ -616,7 +645,7 @@
         changed_items.sort()
         for propname, oldvalue in changed_items:
             prop = props[propname]
-            value = cl.get(nodeid, propname, None)
+            value = cl.get(issueid, propname, None)
             if isinstance(prop, hyperdb.Link):
                 link = self.db.classes[prop.classname]
                 key = link.labelprop(default_to_id=1)

Modified: tracker/roundup-src/roundup/scripts/roundup_mailgw.py
==============================================================================
--- tracker/roundup-src/roundup/scripts/roundup_mailgw.py	(original)
+++ tracker/roundup-src/roundup/scripts/roundup_mailgw.py	Thu Aug  4 15:46:52 2011
@@ -105,6 +105,11 @@
  This supports the same notation as IMAP.
     imaps username:password at server [mailbox]
 
+IMAPS_CRAM:
+ Connect to an IMAP server over ssl using CRAM-MD5 authentication.
+ This supports the same notation as IMAP.
+    imaps_cram username:password@server [mailbox]
+
 """)%{'program': args[0]}
     return 1
 
@@ -153,7 +158,7 @@
     source, specification = args[1:3]
 
     # time out net connections after a minute if we can
-    if source not in ('mailbox', 'imaps'):
+    if source not in ('mailbox', 'imaps', 'imaps_cram'):
         if hasattr(socket, 'setdefaulttimeout'):
             socket.setdefaulttimeout(60)
 
@@ -189,14 +194,19 @@
     elif source == 'apop':
         return handler.do_apop(server, username, password)
     elif source.startswith('imap'):
-        ssl = source.endswith('s')
+        ssl = cram = 0
+        if source.endswith('s'):
+            ssl = 1
+        elif source.endswith('s_cram'):
+            ssl = cram = 1
         mailbox = ''
         if len(args) > 3:
             mailbox = args[3]
-        return handler.do_imap(server, username, password, mailbox, ssl)
+        return handler.do_imap(server, username, password, mailbox, ssl,
+            cram)
 
     return usage(argv, _('Error: The source must be either "mailbox",'
-        ' "pop", "pops", "apop", "imap" or "imaps"'))
+        ' "pop", "pops", "apop", "imap", "imaps" or "imaps_cram"'))
 
 def run():
     sys.exit(main(sys.argv))

Modified: tracker/roundup-src/roundup/scripts/roundup_server.py
==============================================================================
--- tracker/roundup-src/roundup/scripts/roundup_server.py	(original)
+++ tracker/roundup-src/roundup/scripts/roundup_server.py	Thu Aug  4 15:46:52 2011
@@ -29,8 +29,6 @@
 except ImportError:
     SSL = None
 
-from time import sleep
-
 # python version check
 from roundup import configuration, version_check
 from roundup import __version__ as roundup_version
@@ -76,7 +74,7 @@
 
 def auto_ssl():
     print _('WARNING: generating temporary SSL certificate')
-    import OpenSSL, time, random, sys
+    import OpenSSL, random
     pkey = OpenSSL.crypto.PKey()
     pkey.generate_key(OpenSSL.crypto.TYPE_RSA, 768)
     cert = OpenSSL.crypto.X509()
@@ -124,14 +122,11 @@
 
                 def readline(self, *args):
                     """ SSL.Connection can return WantRead """
-                    line = None
-                    while not line:
+                    while True:
                         try:
-                            line = self.__fileobj.readline(*args)
+                            return self.__fileobj.readline(*args)
                         except SSL.WantReadError:
-                            sleep (.1)
-                            line = None
-                    return line
+                            time.sleep(.1)
 
                 def read(self, *args):
                     """ SSL.Connection can return WantRead """
@@ -139,7 +134,7 @@
                         try:
                             return self.__fileobj.read(*args)
                         except SSL.WantReadError:
-                            sleep (.1)
+                            time.sleep(.1)
 
                 def __getattr__(self, attrib):
                     return getattr(self.__fileobj, attrib)
@@ -193,8 +188,6 @@
         """ Execute the CGI command. Wrap an innner call in an error
             handler so all errors can be caught.
         """
-        save_stdin = sys.stdin
-        sys.stdin = self.rfile
         try:
             self.inner_run_cgi()
         except client.NotFound:
@@ -231,7 +224,6 @@
                     # out to the logfile
                     print 'EXCEPTION AT', ts
                     traceback.print_exc()
-        sys.stdin = save_stdin
 
     def run_cgi_outer(self):
         "Log requests that are in progress"
@@ -377,10 +369,16 @@
         env['SCRIPT_NAME'] = ''
         env['SERVER_NAME'] = self.server.server_name
         env['SERVER_PORT'] = str(self.server.server_port)
-        env['HTTP_HOST'] = self.headers['host']
+        try:
+            env['HTTP_HOST'] = self.headers ['host']
+        except KeyError:
+            env['HTTP_HOST'] = ''
         if os.environ.has_key('CGI_SHOW_TIMING'):
             env['CGI_SHOW_TIMING'] = os.environ['CGI_SHOW_TIMING']
         env['HTTP_ACCEPT_LANGUAGE'] = self.headers.get('accept-language')
+        range = self.headers.getheader('range')
+        if range:
+            env['HTTP_RANGE'] = range
 
         # do the roundup thing
         tracker = self.get_tracker(tracker_name)
@@ -481,9 +479,16 @@
 
     SETTINGS = (
             ("main", (
-            (configuration.Option, "host", "",
+            (configuration.Option, "host", "localhost",
                 "Host name of the Roundup web server instance.\n"
-                "If empty, listen on all network interfaces."),
+                "If left unconfigured (no 'host' setting) the default\n"
+                "will be used.\n"
+                "If empty, listen on all network interfaces.\n"
+                "If you want to explicitly listen on all\n"
+                "network interfaces, the address 0.0.0.0 is a more\n"
+                "explicit way to achieve this, the use of an empty\n"
+                "string for this purpose is deprecated and will go away\n"
+                "in a future release."),
             (configuration.IntegerNumberOption, "port", DEFAULT_PORT,
                 "Port to listen on."),
             (configuration.NullableFilePathOption, "favicon", "favicon.ico",
@@ -607,9 +612,28 @@
             DEBUG_MODE = self["MULTIPROCESS"] == "debug"
             CONFIG = self
 
+            def setup(self):
+                if self.CONFIG["SSL"]:
+                    # perform initial ssl handshake. This will set
+                    # internal state correctly so that later closing SSL
+                    # socket works (with SSL end-handshake started)
+                    self.request.do_handshake()
+                RoundupRequestHandler.setup(self)
+
+            def finish(self):
+                RoundupRequestHandler.finish(self)
+                if self.CONFIG["SSL"]:
+                    self.request.shutdown()
+                    self.request.close()
+
         if self["SSL"]:
             base_server = SecureHTTPServer
         else:
+            # time out after a minute if we can
+            # This sets the socket to non-blocking. SSL needs a blocking
+            # socket, so we do this only for non-SSL connections.
+            if hasattr(socket, 'setdefaulttimeout'):
+                socket.setdefaulttimeout(60)
             base_server = BaseHTTPServer.HTTPServer
 
         # obtain request server class
@@ -731,7 +755,10 @@
  -h            print this text and exit
  -S            create or update configuration file and exit
  -C <fname>    use configuration file <fname>
- -n <name>     set the host name of the Roundup web server instance
+ -n <name>     set the host name of the Roundup web server instance,
+               specifies on which network interfaces to listen for
+               connections, defaults to localhost, use 0.0.0.0 to bind
+               to all network interfaces
  -p <port>     set the port to listen on (default: %(port)s)
  -l <fname>    log to the file indicated by fname instead of stderr/stdout
  -N            log client machine names instead of IP addresses (much slower)
@@ -831,10 +858,6 @@
 def run(port=undefined, success_message=None):
     ''' Script entry point - handle args and figure out what to to.
     '''
-    # time out after a minute if we can
-    if hasattr(socket, 'setdefaulttimeout'):
-        socket.setdefaulttimeout(60)
-
     config = ServerConfig()
     # additional options
     short_options = "hvS"

Modified: tracker/roundup-src/roundup/security.py
==============================================================================
--- tracker/roundup-src/roundup/security.py	(original)
+++ tracker/roundup-src/roundup/security.py	Thu Aug  4 15:46:52 2011
@@ -54,6 +54,28 @@
         # we have a winner
         return 1
 
+    def searchable(self, classname, property):
+        """ A Permission is searchable for the given permission if it
+            doesn't include a check method and otherwise matches the
+            given parameters.
+        """
+        if self.name not in ('View', 'Search'):
+            return 0
+
+        # are we checking the correct class
+        if self.klass is not None and self.klass != classname:
+            return 0
+
+        # what about property?
+        if not self._properties_dict[property]:
+            return 0
+
+        if self.check:
+            return 0
+
+        return 1
+
+
     def __repr__(self):
         return '<Permission 0x%x %r,%r,%r,%r>'%(id(self), self.name,
             self.klass, self.properties, self.check)
@@ -162,12 +184,9 @@
            Note that this functionality is actually implemented by the
            Permission.test() method.
         '''
-        roles = self.db.user.get(userid, 'roles')
-        if roles is None:
-            return 0
         if itemid and classname is None:
             raise ValueError, 'classname must accompany itemid'
-        for rolename in [x.lower().strip() for x in roles.split(',')]:
+        for rolename in self.db.user.get_roles(userid):
             if not rolename or not self.role.has_key(rolename):
                 continue
             # for each of the user's Roles, check the permissions
@@ -178,6 +197,81 @@
                     return 1
         return 0
 
+    def roleHasSearchPermission(self, classname, property, *rolenames):
+        """ For each of the given roles, check the permissions.
+            Property can be a transitive property.
+        """
+        perms = []
+        # pre-compute permissions
+        for rn in rolenames :
+            for perm in self.role[rn].permissions:
+                perms.append(perm)
+        # Note: break from inner loop means "found"
+        #       break from outer loop means "not found"
+        cn = classname
+        prev = None
+        prop = None
+        Link = hyperdb.Link
+        Multilink = hyperdb.Multilink
+        for propname in property.split('.'):
+            if prev:
+                try:
+                    cn = prop.classname
+                except AttributeError:
+                    break
+            prev = propname
+            try:
+                cls = self.db.getclass(cn)
+                prop = cls.getprops()[propname]
+            except KeyError:
+                break
+            for perm in perms:
+                if perm.searchable(cn, propname):
+                    break
+            else:
+                break
+        else:
+            # for Link and Multilink require search permission on label-
+            # and order-properties and on ID
+            if isinstance(prop, Multilink) or isinstance(prop, Link):
+                try:
+                    cls = self.db.getclass(prop.classname)
+                except KeyError:
+                    return 0
+                props = dict.fromkeys(('id', cls.labelprop(), cls.orderprop()))
+                for p in props.iterkeys():
+                    for perm in perms:
+                        if perm.searchable(prop.classname, p):
+                            break
+                    else:
+                        return 0
+            return 1
+        return 0
+
+    def hasSearchPermission(self, userid, classname, property):
+        '''Look through all the Roles, and hence Permissions, and
+           see if "permission" exists given the constraints of
+           classname and property.
+
+           A search permission is granted if we find a 'View' or
+           'Search' permission for the user which does *not* include
+           a check function. If such a permission is found, the user may
+           search for the given property in the given class.
+
+           Note that classname *and* property are mandatory arguments.
+
+           Contrary to hasPermission, the search will *not* match if
+           there are additional constraints (namely a check function)
+           on a Permission found.
+
+           Concerning property, the Permission matched must have
+           either no properties listed or the property must appear in
+           the list.
+        '''
+        roles = [r for r in self.db.user.get_roles(userid)
+                 if r and self.role.has_key(r)]
+        return self.roleHasSearchPermission (classname, property, *roles)
+
     def addPermission(self, **propspec):
         ''' Create a new Permission with the properties defined in
             'propspec'. See the Permission class for the possible
@@ -211,4 +305,22 @@
         role = self.role[rolename.lower()]
         role.permissions.append(permission)
 
+    # Convenience methods for removing non-allowed properties from a
+    # filterspec or sort/group list
+
+    def filterFilterspec(self, userid, classname, filterspec):
+        """ Return a filterspec that has all non-allowed properties removed.
+        """
+        return dict ([(k, v) for k, v in filterspec.iteritems()
+            if self.hasSearchPermission(userid,classname,k)])
+
+    def filterSortspec(self, userid, classname, sort):
+        """ Return a sort- or group-list that has all non-allowed properties
+            removed.
+        """
+        if isinstance(sort, tuple) and sort[0] in '+-':
+            sort = [sort]
+        return [(d, p) for d, p in sort
+            if self.hasSearchPermission(userid,classname,p)]
+
 # vim: set filetype=python sts=4 sw=4 et si :

Modified: tracker/roundup-src/roundup/xmlrpc.py
==============================================================================
--- tracker/roundup-src/roundup/xmlrpc.py	(original)
+++ tracker/roundup-src/roundup/xmlrpc.py	Thu Aug  4 15:46:52 2011
@@ -10,6 +10,7 @@
 from roundup.date import Date, Range, Interval
 from roundup import actions
 from SimpleXMLRPCServer import *
+from xmlrpclib import Binary
 
 def translate(value):
     """Translate value to becomes valid for XMLRPC transmission."""
@@ -32,12 +33,19 @@
 
     props = {}
     for arg in args:
-        if arg.find('=') == -1:
+        if isinstance(arg, Binary):
+            arg = arg.data
+        try :
+            key, value = arg.split('=', 1)
+        except ValueError :
             raise UsageError, 'argument "%s" not propname=value'%arg
-        l = arg.split('=')
-        if len(l) < 2:
-            raise UsageError, 'argument "%s" not propname=value'%arg
-        key, value = l[0], '='.join(l[1:])
+        if isinstance(key, unicode):
+            try:
+                key = key.encode ('ascii')
+            except UnicodeEncodeError:
+                raise UsageError, 'argument %r is no valid ascii keyword'%key
+        if isinstance(value, unicode):
+            value = value.encode('utf-8')
         if value:
             try:
                 props[key] = hyperdb.rawToHyperdb(db, cl, itemid,
@@ -81,8 +89,24 @@
     def filter(self, classname, search_matches, filterspec,
                sort=[], group=[]):
         cl = self.db.getclass(classname)
+        uid = self.db.getuid()
+        security = self.db.security
+        filterspec = security.filterFilterspec (uid, classname, filterspec)
+        sort = security.filterSortspec (uid, classname, sort)
+        group = security.filterSortspec (uid, classname, group)
         result = cl.filter(search_matches, filterspec, sort=sort, group=group)
-        return result
+        check = security.hasPermission
+        x = [id for id in result if check('View', uid, classname, itemid=id)]
+        return x
+
+    def lookup(self, classname, key):
+        cl = self.db.getclass(classname)
+        uid = self.db.getuid()
+        prop = cl.getkey()
+        check = self.db.security.hasSearchPermission
+        if not check(uid, classname, 'id') or not check(uid, classname, prop):
+            raise Unauthorised('Permission to search %s denied'%classname)
+        return cl.lookup(key)
 
     def display(self, designator, *properties):
         classname, itemid = hyperdb.splitDesignator(designator)
@@ -113,9 +137,9 @@
             raise UsageError, 'you must provide the "%s" property.'%key
 
         for key in props:
-            if not self.db.security.hasPermission('Edit', self.db.getuid(), classname,
-                                                  property=key):
-                raise Unauthorised('Permission to set %s.%s denied'%(classname, key))
+            if not self.db.security.hasPermission('Create', self.db.getuid(),
+                classname, property=key):
+                raise Unauthorised('Permission to create %s.%s denied'%(classname, key))
 
         # do the actual create
         try:

Modified: tracker/roundup-src/scripts/imapServer.py
==============================================================================
--- tracker/roundup-src/scripts/imapServer.py	(original)
+++ tracker/roundup-src/scripts/imapServer.py	Thu Aug  4 15:46:52 2011
@@ -39,7 +39,7 @@
 import time
 
 logging.basicConfig()
-log = logging.getLogger('IMAPServer')
+log = logging.getLogger('roundup.IMAPServer')
 
 version = '0.1.2'
 

Modified: tracker/roundup-src/setup.py
==============================================================================
--- tracker/roundup-src/setup.py	(original)
+++ tracker/roundup-src/setup.py	Thu Aug  4 15:46:52 2011
@@ -23,6 +23,7 @@
 from roundup.dist.command.build_py import build_py
 from roundup.dist.command.build import build, list_message_files
 from roundup.dist.command.bdist_rpm import bdist_rpm
+from roundup.dist.command.install_lib import install_lib
 from distutils.core import setup
 
 import sys, os
@@ -94,6 +95,20 @@
     # perform the setup action
     from roundup import __version__
 
+    # long_description may not contain non-ascii characters. Distutils
+    # will produce a non-installable installer on linux *and* we can't
+    # run the bdist_wininst on Linux if there are non-ascii characters
+    # because the distutils installer will try to use the mbcs codec
+    # which isn't available on non-windows platforms. See also
+    # http://bugs.python.org/issue10945
+    long_description=open('doc/announcement.txt').read().decode('utf8')
+    try:
+        long_description = long_description.encode('ascii')
+    except UnicodeEncodeError, cause:
+        print >> sys.stderr, "doc/announcement.txt contains non-ascii: %s" \
+            % cause
+        sys.exit(42)
+
     setup(name='roundup',
           version=__version__,
           author="Richard Jones",
@@ -101,69 +116,7 @@
           description="A simple-to-use and -install issue-tracking system"
             " with command-line, web and e-mail interfaces. Highly"
             " customisable.",
-          long_description='''This version of Roundup fixes some bugs:
-
-- Minor update of doc/developers.txt to point to the new resources
-  on www.roundup-tracker.org (Bernhard Reiter)
-- Small CSS improvements regaring the search box (thanks Thomas Arendsan Hein)
-  (issue 2550589)
-- Indexers behaviour made more consistent regarding length of indexed words
-  and stopwords (thanks Thomas Arendsen Hein, Bernhard Reiter)(issue 2550584)
-- fixed typos in the installation instructions (thanks Thomas Arendsen Hein)
-  (issue 2550573) 
-- New config option csv_field_size: Pythons csv module (which is used
-  for export/import) has a new field size limit starting with python2.5.
-  We now issue a warning during export if the limit is too small and use
-  the csv_field_size configuration during import to set the limit for
-  the csv module.
-- Small fix for CGI-handling of XMLRPC requests for python2.4, this
-  worked only for 2.5 and beyond due to a change in the xmlrpc interface
-  in python
-- Document filter method of xmlrpc interface
-- Fix interaction of SSL and XMLRPC, now XMLRPC works with SSL
-
-If you're upgrading from an older version of Roundup you *must* follow
-the "Software Upgrade" guidelines given in the maintenance documentation.
-
-Roundup requires python 2.3 or later (but not 3+) for correct operation.
-
-To give Roundup a try, just download (see below), unpack and run::
-
-    roundup-demo
-
-Documentation is available at the website:
-     http://roundup.sourceforge.net/
-Mailing lists - the place to ask questions:
-     http://sourceforge.net/mail/?group_id=31577
-
-About Roundup
-=============
-
-Roundup is a simple-to-use and -install issue-tracking system with
-command-line, web and e-mail interfaces. It is based on the winning design
-from Ka-Ping Yee in the Software Carpentry "Track" design competition.
-
-Note: Ping is not responsible for this project. The contact for this
-project is richard at users.sourceforge.net.
-
-Roundup manages a number of issues (with flexible properties such as
-"description", "priority", and so on) and provides the ability to:
-
-(a) submit new issues,
-(b) find and edit existing issues, and
-(c) discuss issues with other participants.
-
-The system will facilitate communication among the participants by managing
-discussions and notifying interested parties when issues are edited. One of
-the major design goals for Roundup that it be simple to get going. Roundup
-is therefore usable "out of the box" with any python 2.3+ (but not 3+)
-installation. It doesn't even need to be "installed" to be operational,
-though an install script is provided.
-
-It comes with two issue tracker templates (a classic bug/feature tracker and
-a minimal skeleton) and five database back-ends (anydbm, sqlite, metakit,
-mysql and postgresql).
-''',
+          long_description=long_description,
           url='http://www.roundup-tracker.org',
           download_url='http://pypi.python.org/pypi/roundup',
           classifiers=['Development Status :: 5 - Production/Stable',
@@ -188,6 +141,7 @@
                      'build_py': build_py,
                      'build': build,
                      'bdist_rpm': bdist_rpm,
+                     'install_lib': install_lib,
                      },
           packages=packages,
           py_modules=py_modules,

Modified: tracker/roundup-src/share/roundup/templates/classic/html/_generic.index.html
==============================================================================
--- tracker/roundup-src/share/roundup/templates/classic/html/_generic.index.html	(original)
+++ tracker/roundup-src/share/roundup/templates/classic/html/_generic.index.html	Thu Aug  4 15:46:52 2011
@@ -39,7 +39,8 @@
 
 <p class="form-help">
  Remove entries by deleting their line. Add new entries by appending
- them to the table - put an X in the id column.
+ them to the table - put an X in the id column. If you wish to restore a
+ removed item and you know its id then just put that id in the id column.
 </p>
 </tal:block>
 <form onSubmit="return submit_once()" method="POST"

Added: tracker/roundup-src/share/roundup/templates/classic/html/_generic.keywords_expr.html
==============================================================================
--- (empty file)
+++ tracker/roundup-src/share/roundup/templates/classic/html/_generic.keywords_expr.html	Thu Aug  4 15:46:52 2011
@@ -0,0 +1,11 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html>
+ <head>
+  <link rel="stylesheet" type="text/css" href="@@file/style.css" />
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8;" />
+  <title tal:content="string:Roundup Keywords Expression Editor"></title>
+ </head>
+ <body class="body"
+       tal:content="structure python:utils.keywords_expressions(request)">
+ </body>
+</html>

Modified: tracker/roundup-src/share/roundup/templates/classic/html/issue.search.html
==============================================================================
--- tracker/roundup-src/share/roundup/templates/classic/html/issue.search.html	(original)
+++ tracker/roundup-src/share/roundup/templates/classic/html/issue.search.html	Thu Aug  4 15:46:52 2011
@@ -12,10 +12,10 @@
    cols python:request.columns or 'id activity title status assignedto'.split();
    sort_on python:request.sort and request.sort[0] or nothing;
    sort_desc python:sort_on and sort_on[0] == '-';
-   sort_on python:(sort_on and sort_on[1]) or 'activity';
+   sort_on python:(sort_on and sort_on[1]) or (not request.nodeid and 'activity') or '';
    group_on python:request.group and request.group[0] or nothing;
    group_desc python:group_on and group_on[0] == '-';
-   group_on python:(group_on and group_on[1]) or 'priority';
+   group_on python:(group_on and group_on[1]) or (not request.nodeid and 'priority') or '';
 
    search_input templates/page/macros/search_input;
    search_date templates/page/macros/search_date;
@@ -23,6 +23,7 @@
    sort_input templates/page/macros/sort_input;
    group_input templates/page/macros/group_input;
    search_select templates/page/macros/search_select;
+   search_select_keywords templates/page/macros/search_select_keywords;
    search_select_translated templates/page/macros/search_select_translated;
    search_multiselect templates/page/macros/search_multiselect;">
 
@@ -54,7 +55,7 @@
                 db_klass string:keyword;
                 db_content string:name;">
   <th i18n:translate="">Keyword:</th>
-  <td metal:use-macro="search_select">
+  <td metal:use-macro="search_select_keywords">
     <option metal:fill-slot="extra_options" value="-1" i18n:translate=""
             tal:attributes="selected python:value == '-1'">not selected</option>
   </td>
@@ -167,8 +168,8 @@
  <th i18n:translate="">No Sort or group:</th>
  <td>&nbsp;</td>
  <td>&nbsp;</td>
- <td><input type="radio" name="@sort" value=""></td>
- <td><input type="radio" name="@group" value=""></td>
+ <td><input type="radio" name="@sort" value="" tal:attributes="checked python:sort_on == ''"></td>
+ <td><input type="radio" name="@group" value="" tal:attributes="checked python:group_on == ''"></td>
 </tr>
 
 <tr>

Modified: tracker/roundup-src/share/roundup/templates/classic/html/page.html
==============================================================================
--- tracker/roundup-src/share/roundup/templates/classic/html/page.html	(original)
+++ tracker/roundup-src/share/roundup/templates/classic/html/page.html	Thu Aug  4 15:46:52 2011
@@ -231,7 +231,7 @@
   <input tal:attributes="value python:request.form.getvalue(name) or nothing;
                          name name;
                          id name">
-  <span tal:replace="structure python:db.issue.classhelp(columns,
+  <span tal:replace="structure python:db[db_klass].classhelp(columns,
                                       property=name)" />
 </td>
 
@@ -247,6 +247,22 @@
   </select>
 </td>
 
+<td metal:define-macro="search_select_keywords">
+  <div tal:attributes="id python:'''keywords_%s'''%name">
+    <select tal:attributes="name name; id name"
+            tal:define="value python:request.form.getvalue(name)">
+      <option value="" i18n:translate="">don't care</option>
+      <metal:slot define-slot="extra_options" />
+      <option value="" i18n:translate="" disabled="disabled">------------</option>
+      <option tal:repeat="s python:db[db_klass].list()"
+              tal:attributes="value s/id; selected python:value == s.id"
+              tal:content="python:s[db_content]"></option>
+    </select>
+    <a class="classhelp"
+	   tal:attributes="href python:'''javascript:help_window('issue?@template=keywords_expr&property=%s&form=itemSynopsis', 350, 200)'''%name">(expr)</a>
+  </div>
+</td>
+
 <!-- like search_select, but translates the further values.
 Could extend it (METAL 1.1 attribute "extend-macro")
 -->

Modified: tracker/roundup-src/share/roundup/templates/classic/html/style.css
==============================================================================
--- tracker/roundup-src/share/roundup/templates/classic/html/style.css	(original)
+++ tracker/roundup-src/share/roundup/templates/classic/html/style.css	Thu Aug  4 15:46:52 2011
@@ -413,6 +413,7 @@
   font-weight: bold;
   text-align: left;
 }
+
 input[type="text"]:focus,
 input[type="checkbox"]:focus,
 input[type="radio"]:focus,
@@ -421,5 +422,17 @@
   background-color: #ffffc0;
 }
 
+.calendar_display {
+  text-align: center;
+}
+
+.calendar_display td {
+  padding: 1px 4px 1px 4px;
+}
+
+.calendar_display .today {
+  background-color: #afafaf;
+}
+
 /* vim: sts=2 sw=2 et
 */

Modified: tracker/roundup-src/share/roundup/templates/classic/schema.py
==============================================================================
--- tracker/roundup-src/share/roundup/templates/classic/schema.py	(original)
+++ tracker/roundup-src/share/roundup/templates/classic/schema.py	Thu Aug  4 15:46:52 2011
@@ -112,6 +112,8 @@
     description="User is allowed to view their own user details")
 db.security.addPermissionToRole('User', p)
 p = db.security.addPermission(name='Edit', klass='user', check=own_record,
+    properties=('username', 'password', 'address', 'realname', 'phone',
+        'organisation', 'alternate_addresses', 'queries', 'timezone'),
     description="User is allowed to edit their own user details")
 db.security.addPermissionToRole('User', p)
 
@@ -127,6 +129,8 @@
 p = db.security.addPermission(name='View', klass='query', check=view_query,
     description="User is allowed to view their own and public queries")
 db.security.addPermissionToRole('User', p)
+p = db.security.addPermission(name='Search', klass='query')
+db.security.addPermissionToRole('User', p)
 p = db.security.addPermission(name='Edit', klass='query', check=edit_query,
     description="User is allowed to edit their queries")
 db.security.addPermissionToRole('User', p)

Modified: tracker/roundup-src/share/roundup/templates/minimal/html/_generic.index.html
==============================================================================
--- tracker/roundup-src/share/roundup/templates/minimal/html/_generic.index.html	(original)
+++ tracker/roundup-src/share/roundup/templates/minimal/html/_generic.index.html	Thu Aug  4 15:46:52 2011
@@ -39,7 +39,8 @@
 
 <p class="form-help">
  Remove entries by deleting their line. Add new entries by appending
- them to the table - put an X in the id column.
+ them to the table - put an X in the id column. If you wish to restore a
+ removed item and you know its id then just put that id in the id column.
 </p>
 </tal:block>
 <form onSubmit="return submit_once()" method="POST"

Modified: tracker/roundup-src/share/roundup/templates/minimal/html/page.html
==============================================================================
--- tracker/roundup-src/share/roundup/templates/minimal/html/page.html	(original)
+++ tracker/roundup-src/share/roundup/templates/minimal/html/page.html	Thu Aug  4 15:46:52 2011
@@ -218,7 +218,7 @@
   <input tal:attributes="value python:request.form.getvalue(name) or nothing;
                          name name;
                          id name">
-  <span tal:replace="structure python:db.issue.classhelp(columns,
+  <span tal:replace="structure python:db[db_klass].classhelp(columns,
                                       property=name)" />
 </td>
 

Modified: tracker/roundup-src/share/roundup/templates/minimal/html/style.css
==============================================================================
--- tracker/roundup-src/share/roundup/templates/minimal/html/style.css	(original)
+++ tracker/roundup-src/share/roundup/templates/minimal/html/style.css	Thu Aug  4 15:46:52 2011
@@ -50,9 +50,6 @@
   padding: 5px;
   border-bottom: 1px solid #444;
 }
-#searchbox {
-    float: right;
-}
 
 div#body-title {
   float: left;
@@ -127,7 +124,7 @@
 /* style for search forms */
 ul.search-checkboxes {
     display: inline;
-    padding: none;
+    padding: 0;
     list-style: none;
 }
 ul.search-checkboxes > li {
@@ -421,3 +418,26 @@
   font-weight: bold;
   text-align: left;
 }
+
+input[type="text"]:focus,
+input[type="checkbox"]:focus,
+input[type="radio"]:focus,
+input[type="password"]:focus,
+textarea:focus, select:focus {
+  background-color: #ffffc0;
+}
+
+.calendar_display {
+  text-align: center;
+}
+
+.calendar_display td {
+  padding: 1px 4px 1px 4px;
+}
+
+.calendar_display .today {
+  background-color: #afafaf;
+}
+
+/* vim: sts=2 sw=2 et
+*/

Modified: tracker/roundup-src/share/roundup/templates/minimal/schema.py
==============================================================================
--- tracker/roundup-src/share/roundup/templates/minimal/schema.py	(original)
+++ tracker/roundup-src/share/roundup/templates/minimal/schema.py	Thu Aug  4 15:46:52 2011
@@ -41,6 +41,7 @@
     description="User is allowed to view their own user details")
 db.security.addPermissionToRole('User', p)
 p = db.security.addPermission(name='Edit', klass='user', check=own_record,
+    properties=('username', 'password', 'address', 'alternate_addresses'),
     description="User is allowed to edit their own user details")
 db.security.addPermissionToRole('User', p)
 

Modified: tracker/roundup-src/test/db_test_base.py
==============================================================================
--- tracker/roundup-src/test/db_test_base.py	(original)
+++ tracker/roundup-src/test/db_test_base.py	Thu Aug  4 15:46:52 2011
@@ -24,7 +24,8 @@
 from roundup.hyperdb import String, Password, Link, Multilink, Date, \
     Interval, DatabaseError, Boolean, Number, Node
 from roundup.mailer import Mailer
-from roundup import date, password, init, instance, configuration, support
+from roundup import date, password, init, instance, configuration, \
+    roundupdb, i18n
 
 from mocknull import MockNull
 
@@ -34,6 +35,7 @@
 config.RDBMS_HOST = "localhost"
 config.RDBMS_USER = "rounduptest"
 config.RDBMS_PASSWORD = "rounduptest"
+config.RDBMS_TEMPLATE = "template0"
 #config.logging = MockNull()
 # these TRACKER_WEB and MAIL_DOMAIN values are used in mailgw tests
 config.MAIL_DOMAIN = "your.tracker.email.domain.example"
@@ -113,6 +115,9 @@
         priority.create(name="bug", order="1")
     db.commit()
 
+    # nosy tests require this
+    db.security.addPermissionToRole('User', 'View', 'msg')
+
 class MyTestCase(unittest.TestCase):
     def tearDown(self):
         if hasattr(self, 'db'):
@@ -120,21 +125,72 @@
         if os.path.exists(config.DATABASE):
             shutil.rmtree(config.DATABASE)
 
+    def open_database(self):
+        self.db = self.module.Database(config, 'admin')
+
+
 if os.environ.has_key('LOGGING_LEVEL'):
     from roundup import rlog
     config.logging = rlog.BasicLogging()
     config.logging.setLevel(os.environ['LOGGING_LEVEL'])
-    config.logging.getLogger('hyperdb').setFormat('%(message)s')
+    config.logging.getLogger('roundup.hyperdb').setFormat('%(message)s')
 
-class DBTest(MyTestCase):
+class commonDBTest(MyTestCase):
     def setUp(self):
         # remove previous test, ignore errors
         if os.path.exists(config.DATABASE):
             shutil.rmtree(config.DATABASE)
         os.makedirs(config.DATABASE + '/files')
-        self.db = self.module.Database(config, 'admin')
+        self.open_database()
         setupSchema(self.db, 1, self.module)
 
+    def iterSetup(self, classname='issue'):
+        cls = getattr(self.db, classname)
+        def filt_iter(*args):
+            """ for checking equivalence of filter and filter_iter """
+            return list(cls.filter_iter(*args))
+        return self.assertEqual, cls.filter, filt_iter
+
+    def filteringSetupTransitiveSearch(self, classname='issue'):
+        u_m = {}
+        k = 30
+        for user in (
+                {'username': 'ceo', 'age': 129},
+                {'username': 'grouplead1', 'age': 29, 'supervisor': '3'},
+                {'username': 'grouplead2', 'age': 29, 'supervisor': '3'},
+                {'username': 'worker1', 'age': 25, 'supervisor' : '4'},
+                {'username': 'worker2', 'age': 24, 'supervisor' : '4'},
+                {'username': 'worker3', 'age': 23, 'supervisor' : '5'},
+                {'username': 'worker4', 'age': 22, 'supervisor' : '5'},
+                {'username': 'worker5', 'age': 21, 'supervisor' : '5'}):
+            u = self.db.user.create(**user)
+            u_m [u] = self.db.msg.create(author = u, content = ' '
+                , date = date.Date ('2006-01-%s' % k))
+            k -= 1
+        i = date.Interval('-1d')
+        for issue in (
+                {'title': 'ts1', 'status': '2', 'assignedto': '6',
+                    'priority': '3', 'messages' : [u_m ['6']], 'nosy' : ['4']},
+                {'title': 'ts2', 'status': '1', 'assignedto': '6',
+                    'priority': '3', 'messages' : [u_m ['6']], 'nosy' : ['5']},
+                {'title': 'ts4', 'status': '2', 'assignedto': '7',
+                    'priority': '3', 'messages' : [u_m ['7']]},
+                {'title': 'ts5', 'status': '1', 'assignedto': '8',
+                    'priority': '3', 'messages' : [u_m ['8']]},
+                {'title': 'ts6', 'status': '2', 'assignedto': '9',
+                    'priority': '3', 'messages' : [u_m ['9']]},
+                {'title': 'ts7', 'status': '1', 'assignedto': '10',
+                    'priority': '3', 'messages' : [u_m ['10']]},
+                {'title': 'ts8', 'status': '2', 'assignedto': '10',
+                    'priority': '3', 'messages' : [u_m ['10']], 'foo' : i},
+                {'title': 'ts9', 'status': '1', 'assignedto': '10',
+                    'priority': '3', 'messages' : [u_m ['10'], u_m ['9']]}):
+            self.db.issue.create(**issue)
+        return self.iterSetup(classname)
+
+
+class DBTest(commonDBTest):
+
     def testRefresh(self):
         self.db.refresh_database()
 
@@ -144,11 +200,7 @@
     def testCreatorProperty(self):
         i = self.db.issue
         id1 = i.create(title='spam')
-        self.db.commit()
-        self.db.close()
-        self.db = self.module.Database(config, 'fred')
-        setupSchema(self.db, 0, self.module)
-        i = self.db.issue
+        self.db.journaltag = 'fred'
         id2 = i.create(title='spam')
         self.assertNotEqual(id1, id2)
         self.assertNotEqual(i.get(id1, 'creator'), i.get(id2, 'creator'))
@@ -156,11 +208,7 @@
     def testActorProperty(self):
         i = self.db.issue
         id1 = i.create(title='spam')
-        self.db.commit()
-        self.db.close()
-        self.db = self.module.Database(config, 'fred')
-        setupSchema(self.db, 0, self.module)
-        i = self.db.issue
+        self.db.journaltag = 'fred'
         i.set(id1, title='asfasd')
         self.assertNotEqual(i.get(id1, 'creator'), i.get(id1, 'actor'))
 
@@ -273,6 +321,23 @@
             if commit: self.db.commit()
             self.assertEqual(self.db.issue.get(nid, "nosy"), [])
 
+    def testMakeSeveralMultilinkedNodes(self):
+        for commit in (0,1):
+            u1 = self.db.user.create(username='foo%s'%commit)
+            u2 = self.db.user.create(username='bar%s'%commit)
+            u3 = self.db.user.create(username='baz%s'%commit)
+            nid = self.db.issue.create(title="spam", nosy=[u1])
+            if commit: self.db.commit()
+            self.assertEqual(self.db.issue.get(nid, "nosy"), [u1])
+            self.db.issue.set(nid, deadline=date.Date('.'))
+            self.db.issue.set(nid, nosy=[u1,u2], title='ta%s'%commit)
+            if commit: self.db.commit()
+            self.assertEqual(self.db.issue.get(nid, "nosy"), [u1,u2])
+            self.db.issue.set(nid, deadline=date.Date('.'))
+            self.db.issue.set(nid, nosy=[u1,u2,u3], title='tb%s'%commit)
+            if commit: self.db.commit()
+            self.assertEqual(self.db.issue.get(nid, "nosy"), [u1,u2,u3])
+
     def testMultilinkChangeIterable(self):
         for commit in (0,1):
             # invalid nosy value assertion
@@ -344,11 +409,15 @@
             '2008-02-29.00:00:00')
         self.assertEquals(self.db.issue.filter(None,
             {'deadline': '2008-02-29'}), [nid])
+        self.assertEquals(list(self.db.issue.filter_iter(None,
+            {'deadline': '2008-02-29'})), [nid])
         self.db.issue.set(nid, deadline=date.Date('2008-03-01'))
         self.assertEquals(str(self.db.issue.get(nid, 'deadline')),
             '2008-03-01.00:00:00')
         self.assertEquals(self.db.issue.filter(None,
             {'deadline': '2008-02-29'}), [])
+        self.assertEquals(list(self.db.issue.filter_iter(None,
+            {'deadline': '2008-02-29'})), [])
 
     def testDateUnset(self):
         for commit in (0,1):
@@ -852,6 +921,7 @@
         self.assertEquals(self.db.indexer.search([], self.db.issue), {})
         self.assertEquals(self.db.indexer.search(['hello'], self.db.issue),
             {i1: {'files': [f1]}})
+        # content='world' has the wrong content-type and shouldn't be indexed
         self.assertEquals(self.db.indexer.search(['world'], self.db.issue), {})
         self.assertEquals(self.db.indexer.search(['frooz'], self.db.issue),
             {i2: {}})
@@ -960,45 +1030,17 @@
         self.assertEquals(self.db.indexer.search(['flebble'], self.db.issue),
             {'1': {}})
 
-    def testIndexingOnImport(self):
-        # import a message
-        msgcontent = 'Glrk'
-        msgid = self.db.msg.import_list(['content', 'files', 'recipients'],
-                                        [repr(msgcontent), '[]', '[]'])
-        msg_filename = self.db.filename(self.db.msg.classname, msgid,
-                                        create=1)
-        support.ensureParentsExist(msg_filename)
-        msg_file = open(msg_filename, 'w')
-        msg_file.write(msgcontent)
-        msg_file.close()
-
-        # import a file
-        filecontent = 'Brrk'
-        fileid = self.db.file.import_list(['content'], [repr(filecontent)])
-        file_filename = self.db.filename(self.db.file.classname, fileid,
-                                         create=1)
-        support.ensureParentsExist(file_filename)
-        file_file = open(file_filename, 'w')
-        file_file.write(filecontent)
-        file_file.close()
-
+    def testIndexingPropertiesOnImport(self):
         # import an issue
         title = 'Bzzt'
         nodeid = self.db.issue.import_list(['title', 'messages', 'files',
-            'spam', 'nosy', 'superseder'], [repr(title), repr([msgid]),
-            repr([fileid]), '[]', '[]', '[]'])
+            'spam', 'nosy', 'superseder'], [repr(title), '[]', '[]',
+            '[]', '[]', '[]'])
         self.db.commit()
 
         # Content of title attribute is indexed
         self.assertEquals(self.db.indexer.search([title], self.db.issue),
             {str(nodeid):{}})
-        # Content of message is indexed
-        self.assertEquals(self.db.indexer.search([msgcontent], self.db.issue),
-            {str(nodeid):{'messages':[str(msgid)]}})
-        # Content of file is indexed
-        self.assertEquals(self.db.indexer.search([filecontent], self.db.issue),
-            {str(nodeid):{'files':[str(fileid)]}})
-
 
 
     #
@@ -1113,13 +1155,12 @@
         self.db.issue.retire(ids[0])
         self.assertEqual(len(self.db.issue.stringFind(title='spam')), 1)
 
-    def filteringSetup(self):
+    def filteringSetup(self, classname='issue'):
         for user in (
-                {'username': 'bleep', 'age': 1},
-                {'username': 'blop', 'age': 1.5},
-                {'username': 'blorp', 'age': 2}):
+                {'username': 'bleep', 'age': 1, 'assignable': True},
+                {'username': 'blop', 'age': 1.5, 'assignable': True},
+                {'username': 'blorp', 'age': 2, 'assignable': False}):
             self.db.user.create(**user)
-        iss = self.db.issue
         file_content = ''.join([chr(i) for i in range(255)])
         f = self.db.file.create(content=file_content)
         for issue in (
@@ -1137,92 +1178,133 @@
                     'files': [f]}):
             self.db.issue.create(**issue)
         self.db.commit()
-        return self.assertEqual, self.db.issue.filter
+        return self.iterSetup(classname)
 
     def testFilteringID(self):
-        ae, filt = self.filteringSetup()
-        ae(filt(None, {'id': '1'}, ('+','id'), (None,None)), ['1'])
-        ae(filt(None, {'id': '2'}, ('+','id'), (None,None)), ['2'])
-        ae(filt(None, {'id': '100'}, ('+','id'), (None,None)), [])
+        ae, filter, filter_iter = self.filteringSetup()
+        for filt in filter, filter_iter:
+            ae(filt(None, {'id': '1'}, ('+','id'), (None,None)), ['1'])
+            ae(filt(None, {'id': '2'}, ('+','id'), (None,None)), ['2'])
+            ae(filt(None, {'id': '100'}, ('+','id'), (None,None)), [])
+
+    def testFilteringBoolean(self):
+        ae, filter, filter_iter = self.filteringSetup('user')
+        a = 'assignable'
+        for filt in filter, filter_iter:
+            ae(filt(None, {a: '1'}, ('+','id'), (None,None)), ['3','4'])
+            ae(filt(None, {a: '0'}, ('+','id'), (None,None)), ['5'])
+            ae(filt(None, {a: ['1']}, ('+','id'), (None,None)), ['3','4'])
+            ae(filt(None, {a: ['0']}, ('+','id'), (None,None)), ['5'])
+            ae(filt(None, {a: ['0','1']}, ('+','id'), (None,None)),
+                ['3','4','5'])
+            ae(filt(None, {a: 'True'}, ('+','id'), (None,None)), ['3','4'])
+            ae(filt(None, {a: 'False'}, ('+','id'), (None,None)), ['5'])
+            ae(filt(None, {a: ['True']}, ('+','id'), (None,None)), ['3','4'])
+            ae(filt(None, {a: ['False']}, ('+','id'), (None,None)), ['5'])
+            ae(filt(None, {a: ['False','True']}, ('+','id'), (None,None)),
+                ['3','4','5'])
+            ae(filt(None, {a: True}, ('+','id'), (None,None)), ['3','4'])
+            ae(filt(None, {a: False}, ('+','id'), (None,None)), ['5'])
+            ae(filt(None, {a: 1}, ('+','id'), (None,None)), ['3','4'])
+            ae(filt(None, {a: 0}, ('+','id'), (None,None)), ['5'])
+            ae(filt(None, {a: [1]}, ('+','id'), (None,None)), ['3','4'])
+            ae(filt(None, {a: [0]}, ('+','id'), (None,None)), ['5'])
+            ae(filt(None, {a: [0,1]}, ('+','id'), (None,None)), ['3','4','5'])
+            ae(filt(None, {a: [True]}, ('+','id'), (None,None)), ['3','4'])
+            ae(filt(None, {a: [False]}, ('+','id'), (None,None)), ['5'])
+            ae(filt(None, {a: [False,True]}, ('+','id'), (None,None)),
+                ['3','4','5'])
 
     def testFilteringNumber(self):
-        self.filteringSetup()
-        ae, filt = self.assertEqual, self.db.user.filter
-        ae(filt(None, {'age': '1'}, ('+','id'), (None,None)), ['3'])
-        ae(filt(None, {'age': '1.5'}, ('+','id'), (None,None)), ['4'])
-        ae(filt(None, {'age': '2'}, ('+','id'), (None,None)), ['5'])
-        ae(filt(None, {'age': ['1','2']}, ('+','id'), (None,None)), ['3','5'])
+        ae, filter, filter_iter = self.filteringSetup('user')
+        for filt in filter, filter_iter:
+            ae(filt(None, {'age': '1'}, ('+','id'), (None,None)), ['3'])
+            ae(filt(None, {'age': '1.5'}, ('+','id'), (None,None)), ['4'])
+            ae(filt(None, {'age': '2'}, ('+','id'), (None,None)), ['5'])
+            ae(filt(None, {'age': ['1','2']}, ('+','id'), (None,None)),
+                ['3','5'])
+            ae(filt(None, {'age': 2}, ('+','id'), (None,None)), ['5'])
+            ae(filt(None, {'age': [1,2]}, ('+','id'), (None,None)), ['3','5'])
 
     def testFilteringString(self):
-        ae, filt = self.filteringSetup()
-        ae(filt(None, {'title': ['one']}, ('+','id'), (None,None)), ['1'])
-        ae(filt(None, {'title': ['issue one']}, ('+','id'), (None,None)),
-            ['1'])
-        ae(filt(None, {'title': ['issue', 'one']}, ('+','id'), (None,None)),
-            ['1'])
-        ae(filt(None, {'title': ['issue']}, ('+','id'), (None,None)),
-            ['1','2','3'])
-        ae(filt(None, {'title': ['one', 'two']}, ('+','id'), (None,None)),
-            [])
+        ae, filter, filter_iter = self.filteringSetup()
+        for filt in filter, filter_iter:
+            ae(filt(None, {'title': ['one']}, ('+','id'), (None,None)), ['1'])
+            ae(filt(None, {'title': ['issue one']}, ('+','id'), (None,None)),
+                ['1'])
+            ae(filt(None, {'title': ['issue', 'one']}, ('+','id'), (None,None)),
+                ['1'])
+            ae(filt(None, {'title': ['issue']}, ('+','id'), (None,None)),
+                ['1','2','3'])
+            ae(filt(None, {'title': ['one', 'two']}, ('+','id'), (None,None)),
+                [])
 
     def testFilteringLink(self):
-        ae, filt = self.filteringSetup()
-        ae(filt(None, {'status': '1'}, ('+','id'), (None,None)), ['2','3'])
-        ae(filt(None, {'assignedto': '-1'}, ('+','id'), (None,None)), ['3','4'])
-        ae(filt(None, {'assignedto': None}, ('+','id'), (None,None)), ['3','4'])
-        ae(filt(None, {'assignedto': [None]}, ('+','id'), (None,None)),
-            ['3','4'])
-        ae(filt(None, {'assignedto': ['-1', None]}, ('+','id'), (None,None)),
-            ['3','4'])
-        ae(filt(None, {'assignedto': ['1', None]}, ('+','id'), (None,None)),
-            ['1', '3','4'])
+        ae, filter, filter_iter = self.filteringSetup()
+        a = 'assignedto'
+        grp = (None, None)
+        for filt in filter, filter_iter:
+            ae(filt(None, {'status': '1'}, ('+','id'), grp), ['2','3'])
+            ae(filt(None, {a: '-1'}, ('+','id'), grp), ['3','4'])
+            ae(filt(None, {a: None}, ('+','id'), grp), ['3','4'])
+            ae(filt(None, {a: [None]}, ('+','id'), grp), ['3','4'])
+            ae(filt(None, {a: ['-1', None]}, ('+','id'), grp), ['3','4'])
+            ae(filt(None, {a: ['1', None]}, ('+','id'), grp), ['1', '3','4'])
 
     def testFilteringMultilinkAndGroup(self):
         """testFilteringMultilinkAndGroup:
         See roundup Bug 1541128: apparently grouping by something and
         searching a Multilink failed with MySQL 5.0
         """
-        ae, filt = self.filteringSetup()
-        ae(filt(None, {'files': '1'}, ('-','activity'), ('+','status')), ['4'])
+        ae, filter, filter_iter = self.filteringSetup()
+        for f in filter, filter_iter:
+            ae(f(None, {'files': '1'}, ('-','activity'), ('+','status')), ['4'])
 
     def testFilteringRetired(self):
-        ae, filt = self.filteringSetup()
+        ae, filter, filter_iter = self.filteringSetup()
         self.db.issue.retire('2')
-        ae(filt(None, {'status': '1'}, ('+','id'), (None,None)), ['3'])
+        for f in filter, filter_iter:
+            ae(f(None, {'status': '1'}, ('+','id'), (None,None)), ['3'])
 
     def testFilteringMultilink(self):
-        ae, filt = self.filteringSetup()
-        ae(filt(None, {'nosy': '3'}, ('+','id'), (None,None)), ['4'])
-        ae(filt(None, {'nosy': '-1'}, ('+','id'), (None,None)), ['1', '2'])
-        ae(filt(None, {'nosy': ['1','2']}, ('+', 'status'),
-            ('-', 'deadline')), ['4', '3'])
+        ae, filter, filter_iter = self.filteringSetup()
+        for filt in filter, filter_iter:
+            ae(filt(None, {'nosy': '3'}, ('+','id'), (None,None)), ['4'])
+            ae(filt(None, {'nosy': '-1'}, ('+','id'), (None,None)), ['1', '2'])
+            ae(filt(None, {'nosy': ['1','2']}, ('+', 'status'),
+                ('-', 'deadline')), ['4', '3'])
 
     def testFilteringMany(self):
-        ae, filt = self.filteringSetup()
-        ae(filt(None, {'nosy': '2', 'status': '1'}, ('+','id'), (None,None)),
-            ['3'])
+        ae, filter, filter_iter = self.filteringSetup()
+        for f in filter, filter_iter:
+            ae(f(None, {'nosy': '2', 'status': '1'}, ('+','id'), (None,None)),
+                ['3'])
 
     def testFilteringRangeBasic(self):
-        ae, filt = self.filteringSetup()
-        ae(filt(None, {'deadline': 'from 2003-02-10 to 2003-02-23'}), ['1','3'])
-        ae(filt(None, {'deadline': '2003-02-10; 2003-02-23'}), ['1','3'])
-        ae(filt(None, {'deadline': '; 2003-02-16'}), ['2'])
+        ae, filter, filter_iter = self.filteringSetup()
+        d = 'deadline'
+        for f in filter, filter_iter:
+            ae(f(None, {d: 'from 2003-02-10 to 2003-02-23'}), ['1','3'])
+            ae(f(None, {d: '2003-02-10; 2003-02-23'}), ['1','3'])
+            ae(f(None, {d: '; 2003-02-16'}), ['2'])
 
     def testFilteringRangeTwoSyntaxes(self):
-        ae, filt = self.filteringSetup()
-        ae(filt(None, {'deadline': 'from 2003-02-16'}), ['1', '3', '4'])
-        ae(filt(None, {'deadline': '2003-02-16;'}), ['1', '3', '4'])
+        ae, filter, filter_iter = self.filteringSetup()
+        for filt in filter, filter_iter:
+            ae(filt(None, {'deadline': 'from 2003-02-16'}), ['1', '3', '4'])
+            ae(filt(None, {'deadline': '2003-02-16;'}), ['1', '3', '4'])
 
     def testFilteringRangeYearMonthDay(self):
-        ae, filt = self.filteringSetup()
-        ae(filt(None, {'deadline': '2002'}), [])
-        ae(filt(None, {'deadline': '2003'}), ['1', '2', '3'])
-        ae(filt(None, {'deadline': '2004'}), ['4'])
-        ae(filt(None, {'deadline': '2003-02-16'}), ['1'])
-        ae(filt(None, {'deadline': '2003-02-17'}), [])
+        ae, filter, filter_iter = self.filteringSetup()
+        for filt in filter, filter_iter:
+            ae(filt(None, {'deadline': '2002'}), [])
+            ae(filt(None, {'deadline': '2003'}), ['1', '2', '3'])
+            ae(filt(None, {'deadline': '2004'}), ['4'])
+            ae(filt(None, {'deadline': '2003-02-16'}), ['1'])
+            ae(filt(None, {'deadline': '2003-02-17'}), [])
 
     def testFilteringRangeMonths(self):
-        ae, filt = self.filteringSetup()
+        ae, filter, filter_iter = self.filteringSetup()
         for month in range(1, 13):
             for n in range(1, month+1):
                 i = self.db.issue.create(title='%d.%d'%(month, n),
@@ -1230,55 +1312,61 @@
         self.db.commit()
 
         for month in range(1, 13):
-            r = filt(None, dict(deadline='2001-%02d'%month))
-            assert len(r) == month, 'month %d != length %d'%(month, len(r))
+            for filt in filter, filter_iter:
+                r = filt(None, dict(deadline='2001-%02d'%month))
+                assert len(r) == month, 'month %d != length %d'%(month, len(r))
 
     def testFilteringRangeInterval(self):
-        ae, filt = self.filteringSetup()
-        ae(filt(None, {'foo': 'from 0:50 to 2:00'}), ['1'])
-        ae(filt(None, {'foo': 'from 0:50 to 1d 2:00'}), ['1', '2'])
-        ae(filt(None, {'foo': 'from 5:50'}), ['2'])
-        ae(filt(None, {'foo': 'to 0:05'}), [])
+        ae, filter, filter_iter = self.filteringSetup()
+        for filt in filter, filter_iter:
+            ae(filt(None, {'foo': 'from 0:50 to 2:00'}), ['1'])
+            ae(filt(None, {'foo': 'from 0:50 to 1d 2:00'}), ['1', '2'])
+            ae(filt(None, {'foo': 'from 5:50'}), ['2'])
+            ae(filt(None, {'foo': 'to 0:05'}), [])
 
     def testFilteringRangeGeekInterval(self):
-        ae, filt = self.filteringSetup()
+        ae, filter, filter_iter = self.filteringSetup()
         for issue in (
                 { 'deadline': date.Date('. -2d')},
                 { 'deadline': date.Date('. -1d')},
                 { 'deadline': date.Date('. -8d')},
                 ):
             self.db.issue.create(**issue)
-        ae(filt(None, {'deadline': '-2d;'}), ['5', '6'])
-        ae(filt(None, {'deadline': '-1d;'}), ['6'])
-        ae(filt(None, {'deadline': '-1w;'}), ['5', '6'])
+        for filt in filter, filter_iter:
+            ae(filt(None, {'deadline': '-2d;'}), ['5', '6'])
+            ae(filt(None, {'deadline': '-1d;'}), ['6'])
+            ae(filt(None, {'deadline': '-1w;'}), ['5', '6'])
 
     def testFilteringIntervalSort(self):
         # 1: '1:10'
         # 2: '1d'
         # 3: None
         # 4: '0:10'
-        ae, filt = self.filteringSetup()
-        # ascending should sort None, 1:10, 1d
-        ae(filt(None, {}, ('+','foo'), (None,None)), ['3', '4', '1', '2'])
-        # descending should sort 1d, 1:10, None
-        ae(filt(None, {}, ('-','foo'), (None,None)), ['2', '1', '4', '3'])
+        ae, filter, filter_iter = self.filteringSetup()
+        for filt in filter, filter_iter:
+            # ascending should sort None, 1:10, 1d
+            ae(filt(None, {}, ('+','foo'), (None,None)), ['3', '4', '1', '2'])
+            # descending should sort 1d, 1:10, None
+            ae(filt(None, {}, ('-','foo'), (None,None)), ['2', '1', '4', '3'])
 
     def testFilteringStringSort(self):
         # 1: 'issue one'
         # 2: 'issue two'
         # 3: 'issue three'
         # 4: 'non four'
-        ae, filt = self.filteringSetup()
-        ae(filt(None, {}, ('+','title')), ['1', '3', '2', '4'])
-        ae(filt(None, {}, ('-','title')), ['4', '2', '3', '1'])
+        ae, filter, filter_iter = self.filteringSetup()
+        for filt in filter, filter_iter:
+            ae(filt(None, {}, ('+','title')), ['1', '3', '2', '4'])
+            ae(filt(None, {}, ('-','title')), ['4', '2', '3', '1'])
         # Test string case: For now allow both, w/wo case matching.
         # 1: 'issue one'
         # 2: 'issue two'
         # 3: 'Issue three'
         # 4: 'non four'
         self.db.issue.set('3', title='Issue three')
-        ae(filt(None, {}, ('+','title')), ['1', '3', '2', '4'])
-        ae(filt(None, {}, ('-','title')), ['4', '2', '3', '1'])
+        for filt in filter, filter_iter:
+            ae(filt(None, {}, ('+','title')), ['1', '3', '2', '4'])
+            ae(filt(None, {}, ('-','title')), ['4', '2', '3', '1'])
         # Obscure bug in anydbm backend trying to convert to number
         # 1: '1st issue'
         # 2: '2'
@@ -1286,8 +1374,9 @@
         # 4: 'non four'
         self.db.issue.set('1', title='1st issue')
         self.db.issue.set('2', title='2')
-        ae(filt(None, {}, ('+','title')), ['1', '2', '3', '4'])
-        ae(filt(None, {}, ('-','title')), ['4', '3', '2', '1'])
+        for filt in filter, filter_iter:
+            ae(filt(None, {}, ('+','title')), ['1', '2', '3', '4'])
+            ae(filt(None, {}, ('-','title')), ['4', '3', '2', '1'])
 
     def testFilteringMultilinkSort(self):
         # 1: []                 Reverse:  1: []
@@ -1297,7 +1386,9 @@
         # Note the sort order for the multilink doen't change when
         # reversing the sort direction due to the re-sorting of the
         # multilink!
-        ae, filt = self.filteringSetup()
+        # Note that we don't test filter_iter here, Multilink sort-order
+        # isn't defined for that.
+        ae, filt, dummy = self.filteringSetup()
         ae(filt(None, {}, ('+','nosy'), (None,None)), ['1', '2', '4', '3'])
         ae(filt(None, {}, ('-','nosy'), (None,None)), ['4', '3', '1', '2'])
 
@@ -1306,7 +1397,9 @@
         # 2: status: 1 "unread"      nosy: []
         # 3: status: 1 "unread"      nosy: ['admin','fred']
         # 4: status: 3 "testing"     nosy: ['admin','bleep','fred']
-        ae, filt = self.filteringSetup()
+        # Note that we don't test filter_iter here, Multilink sort-order
+        # isn't defined for that.
+        ae, filt, dummy = self.filteringSetup()
         ae(filt(None, {}, ('+','nosy'), ('+','status')), ['1', '4', '2', '3'])
         ae(filt(None, {}, ('-','nosy'), ('+','status')), ['1', '4', '3', '2'])
         ae(filt(None, {}, ('+','nosy'), ('-','status')), ['2', '3', '4', '1'])
@@ -1321,228 +1414,202 @@
         # 2: status: 1 -> 'u', priority: 3 -> 1
         # 3: status: 1 -> 'u', priority: 2 -> 3
         # 4: status: 3 -> 't', priority: 2 -> 3
-        ae, filt = self.filteringSetup()
-        ae(filt(None, {}, ('+','status'), ('+','priority')),
-            ['1', '2', '4', '3'])
-        ae(filt(None, {'priority':'2'}, ('+','status'), ('+','priority')),
-            ['4', '3'])
-        ae(filt(None, {'priority.order':'3'}, ('+','status'), ('+','priority')),
-            ['4', '3'])
-        ae(filt(None, {'priority':['2','3']}, ('+','priority'), ('+','status')),
-            ['1', '4', '2', '3'])
-        ae(filt(None, {}, ('+','priority'), ('+','status')),
-            ['1', '4', '2', '3'])
+        ae, filter, filter_iter = self.filteringSetup()
+        for filt in filter, filter_iter:
+            ae(filt(None, {}, ('+','status'), ('+','priority')),
+                ['1', '2', '4', '3'])
+            ae(filt(None, {'priority':'2'}, ('+','status'), ('+','priority')),
+                ['4', '3'])
+            ae(filt(None, {'priority.order':'3'}, ('+','status'),
+                ('+','priority')), ['4', '3'])
+            ae(filt(None, {'priority':['2','3']}, ('+','priority'),
+                ('+','status')), ['1', '4', '2', '3'])
+            ae(filt(None, {}, ('+','priority'), ('+','status')),
+                ['1', '4', '2', '3'])
 
     def testFilteringDateSort(self):
         # '1': '2003-02-16.22:50'
         # '2': '2003-01-01.00:00'
         # '3': '2003-02-18'
         # '4': '2004-03-08'
-        ae, filt = self.filteringSetup()
-        # ascending
-        ae(filt(None, {}, ('+','deadline'), (None,None)), ['2', '1', '3', '4'])
-        # descending
-        ae(filt(None, {}, ('-','deadline'), (None,None)), ['4', '3', '1', '2'])
+        ae, filter, filter_iter = self.filteringSetup()
+        for f in filter, filter_iter:
+            # ascending
+            ae(f(None, {}, ('+','deadline'), (None,None)), ['2', '1', '3', '4'])
+            # descending
+            ae(f(None, {}, ('-','deadline'), (None,None)), ['4', '3', '1', '2'])
 
     def testFilteringDateSortPriorityGroup(self):
         # '1': '2003-02-16.22:50'  1 => 2
         # '2': '2003-01-01.00:00'  3 => 1
         # '3': '2003-02-18'        2 => 3
         # '4': '2004-03-08'        1 => 2
-        ae, filt = self.filteringSetup()
+        ae, filter, filter_iter = self.filteringSetup()
 
-        # ascending
-        ae(filt(None, {}, ('+','deadline'), ('+','priority')),
-            ['2', '1', '3', '4'])
-        ae(filt(None, {}, ('-','deadline'), ('+','priority')),
-            ['1', '2', '4', '3'])
-        # descending
-        ae(filt(None, {}, ('+','deadline'), ('-','priority')),
-            ['3', '4', '2', '1'])
-        ae(filt(None, {}, ('-','deadline'), ('-','priority')),
-            ['4', '3', '1', '2'])
-
-    def filteringSetupTransitiveSearch(self):
-        u_m = {}
-        k = 30
-        for user in (
-                {'username': 'ceo', 'age': 129},
-                {'username': 'grouplead1', 'age': 29, 'supervisor': '3'},
-                {'username': 'grouplead2', 'age': 29, 'supervisor': '3'},
-                {'username': 'worker1', 'age': 25, 'supervisor' : '4'},
-                {'username': 'worker2', 'age': 24, 'supervisor' : '4'},
-                {'username': 'worker3', 'age': 23, 'supervisor' : '5'},
-                {'username': 'worker4', 'age': 22, 'supervisor' : '5'},
-                {'username': 'worker5', 'age': 21, 'supervisor' : '5'}):
-            u = self.db.user.create(**user)
-            u_m [u] = self.db.msg.create(author = u, content = ' '
-                , date = date.Date ('2006-01-%s' % k))
-            k -= 1
-        iss = self.db.issue
-        for issue in (
-                {'title': 'ts1', 'status': '2', 'assignedto': '6',
-                    'priority': '3', 'messages' : [u_m ['6']], 'nosy' : ['4']},
-                {'title': 'ts2', 'status': '1', 'assignedto': '6',
-                    'priority': '3', 'messages' : [u_m ['6']], 'nosy' : ['5']},
-                {'title': 'ts4', 'status': '2', 'assignedto': '7',
-                    'priority': '3', 'messages' : [u_m ['7']]},
-                {'title': 'ts5', 'status': '1', 'assignedto': '8',
-                    'priority': '3', 'messages' : [u_m ['8']]},
-                {'title': 'ts6', 'status': '2', 'assignedto': '9',
-                    'priority': '3', 'messages' : [u_m ['9']]},
-                {'title': 'ts7', 'status': '1', 'assignedto': '10',
-                    'priority': '3', 'messages' : [u_m ['10']]},
-                {'title': 'ts8', 'status': '2', 'assignedto': '10',
-                    'priority': '3', 'messages' : [u_m ['10']]},
-                {'title': 'ts9', 'status': '1', 'assignedto': '10',
-                    'priority': '3', 'messages' : [u_m ['10'], u_m ['9']]}):
-            self.db.issue.create(**issue)
-        return self.assertEqual, self.db.issue.filter
+        for filt in filter, filter_iter:
+            # ascending
+            ae(filt(None, {}, ('+','deadline'), ('+','priority')),
+                ['2', '1', '3', '4'])
+            ae(filt(None, {}, ('-','deadline'), ('+','priority')),
+                ['1', '2', '4', '3'])
+            # descending
+            ae(filt(None, {}, ('+','deadline'), ('-','priority')),
+                ['3', '4', '2', '1'])
+            ae(filt(None, {}, ('-','deadline'), ('-','priority')),
+                ['4', '3', '1', '2'])
 
     def testFilteringTransitiveLinkUser(self):
-        ae, filt = self.filteringSetupTransitiveSearch()
-        ufilt = self.db.user.filter
-        ae(ufilt(None, {'supervisor.username': 'ceo'}, ('+','username')),
-            ['4', '5'])
-        ae(ufilt(None, {'supervisor.supervisor.username': 'ceo'},
-            ('+','username')), ['6', '7', '8', '9', '10'])
-        ae(ufilt(None, {'supervisor.supervisor': '3'}, ('+','username')),
-            ['6', '7', '8', '9', '10'])
-        ae(ufilt(None, {'supervisor.supervisor.id': '3'}, ('+','username')),
-            ['6', '7', '8', '9', '10'])
-        ae(ufilt(None, {'supervisor.username': 'grouplead1'}, ('+','username')),
-            ['6', '7'])
-        ae(ufilt(None, {'supervisor.username': 'grouplead2'}, ('+','username')),
-            ['8', '9', '10'])
-        ae(ufilt(None, {'supervisor.username': 'grouplead2',
-            'supervisor.supervisor.username': 'ceo'}, ('+','username')),
-            ['8', '9', '10'])
-        ae(ufilt(None, {'supervisor.supervisor': '3', 'supervisor': '4'},
-            ('+','username')), ['6', '7'])
+        ae, filter, filter_iter = self.filteringSetupTransitiveSearch('user')
+        for f in filter, filter_iter:
+            ae(f(None, {'supervisor.username': 'ceo'}, ('+','username')),
+                ['4', '5'])
+            ae(f(None, {'supervisor.supervisor.username': 'ceo'},
+                ('+','username')), ['6', '7', '8', '9', '10'])
+            ae(f(None, {'supervisor.supervisor': '3'}, ('+','username')),
+                ['6', '7', '8', '9', '10'])
+            ae(f(None, {'supervisor.supervisor.id': '3'}, ('+','username')),
+                ['6', '7', '8', '9', '10'])
+            ae(f(None, {'supervisor.username': 'grouplead1'}, ('+','username')),
+                ['6', '7'])
+            ae(f(None, {'supervisor.username': 'grouplead2'}, ('+','username')),
+                ['8', '9', '10'])
+            ae(f(None, {'supervisor.username': 'grouplead2',
+                'supervisor.supervisor.username': 'ceo'}, ('+','username')),
+                ['8', '9', '10'])
+            ae(f(None, {'supervisor.supervisor': '3', 'supervisor': '4'},
+                ('+','username')), ['6', '7'])
 
     def testFilteringTransitiveLinkSort(self):
-        ae, filt = self.filteringSetupTransitiveSearch()
-        ufilt = self.db.user.filter
+        ae, filter, filter_iter = self.filteringSetupTransitiveSearch()
+        ae, ufilter, ufilter_iter = self.iterSetup('user')
         # Need to make ceo his own (and first two users') supervisor,
         # otherwise we will depend on sorting order of NULL values.
         # Leave that to a separate test.
         self.db.user.set('1', supervisor = '3')
         self.db.user.set('2', supervisor = '3')
         self.db.user.set('3', supervisor = '3')
-        ae(ufilt(None, {'supervisor':'3'}, []), ['1', '2', '3', '4', '5'])
-        ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'),
-            ('+','supervisor.supervisor'), ('+','supervisor'),
-            ('+','username')]),
-            ['1', '3', '2', '4', '5', '6', '7', '8', '9', '10'])
-        ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'),
-            ('-','supervisor.supervisor'), ('-','supervisor'),
-            ('+','username')]),
-            ['8', '9', '10', '6', '7', '1', '3', '2', '4', '5'])
-        ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
-            ('+','assignedto.supervisor.supervisor'),
-            ('+','assignedto.supervisor'), ('+','assignedto')]),
-            ['1', '2', '3', '4', '5', '6', '7', '8'])
-        ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
-            ('+','assignedto.supervisor.supervisor'),
-            ('-','assignedto.supervisor'), ('+','assignedto')]),
-            ['4', '5', '6', '7', '8', '1', '2', '3'])
-        ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
-            ('+','assignedto.supervisor.supervisor'),
-            ('+','assignedto.supervisor'), ('+','assignedto'),
-            ('-','status')]),
-            ['2', '1', '3', '4', '5', '6', '8', '7'])
-        ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
-            ('+','assignedto.supervisor.supervisor'),
-            ('+','assignedto.supervisor'), ('+','assignedto'),
-            ('+','status')]),
-            ['1', '2', '3', '4', '5', '7', '6', '8'])
-        ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
-            ('+','assignedto.supervisor.supervisor'),
-            ('-','assignedto.supervisor'), ('+','assignedto'), ('+','status')]),
-            ['4', '5', '7', '6', '8', '1', '2', '3'])
-        ae(filt(None, {'assignedto':['6','7','8','9','10']},
-            [('+','assignedto.supervisor.supervisor.supervisor'),
-            ('+','assignedto.supervisor.supervisor'),
-            ('-','assignedto.supervisor'), ('+','assignedto'), ('+','status')]),
-            ['4', '5', '7', '6', '8', '1', '2', '3'])
-        ae(filt(None, {'assignedto':['6','7','8','9']},
-            [('+','assignedto.supervisor.supervisor.supervisor'),
-            ('+','assignedto.supervisor.supervisor'),
-            ('-','assignedto.supervisor'), ('+','assignedto'), ('+','status')]),
-            ['4', '5', '1', '2', '3'])
+        for ufilt in ufilter, ufilter_iter:
+            ae(ufilt(None, {'supervisor':'3'}, []), ['1', '2', '3', '4', '5'])
+            ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'),
+                ('+','supervisor.supervisor'), ('+','supervisor'),
+                ('+','username')]),
+                ['1', '3', '2', '4', '5', '6', '7', '8', '9', '10'])
+            ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'),
+                ('-','supervisor.supervisor'), ('-','supervisor'),
+                ('+','username')]),
+                ['8', '9', '10', '6', '7', '1', '3', '2', '4', '5'])
+        for f in filter, filter_iter:
+            ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
+                ('+','assignedto.supervisor.supervisor'),
+                ('+','assignedto.supervisor'), ('+','assignedto')]),
+                ['1', '2', '3', '4', '5', '6', '7', '8'])
+            ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
+                ('+','assignedto.supervisor.supervisor'),
+                ('-','assignedto.supervisor'), ('+','assignedto')]),
+                ['4', '5', '6', '7', '8', '1', '2', '3'])
+            ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
+                ('+','assignedto.supervisor.supervisor'),
+                ('+','assignedto.supervisor'), ('+','assignedto'),
+                ('-','status')]),
+                ['2', '1', '3', '4', '5', '6', '8', '7'])
+            ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
+                ('+','assignedto.supervisor.supervisor'),
+                ('+','assignedto.supervisor'), ('+','assignedto'),
+                ('+','status')]),
+                ['1', '2', '3', '4', '5', '7', '6', '8'])
+            ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
+                ('+','assignedto.supervisor.supervisor'),
+                ('-','assignedto.supervisor'), ('+','assignedto'),
+                ('+','status')]), ['4', '5', '7', '6', '8', '1', '2', '3'])
+            ae(f(None, {'assignedto':['6','7','8','9','10']},
+                [('+','assignedto.supervisor.supervisor.supervisor'),
+                ('+','assignedto.supervisor.supervisor'),
+                ('-','assignedto.supervisor'), ('+','assignedto'),
+                ('+','status')]), ['4', '5', '7', '6', '8', '1', '2', '3'])
+            ae(f(None, {'assignedto':['6','7','8','9']},
+                [('+','assignedto.supervisor.supervisor.supervisor'),
+                ('+','assignedto.supervisor.supervisor'),
+                ('-','assignedto.supervisor'), ('+','assignedto'),
+                ('+','status')]), ['4', '5', '1', '2', '3'])
 
     def testFilteringTransitiveLinkSortNull(self):
         """Check sorting of NULL values"""
-        ae, filt = self.filteringSetupTransitiveSearch()
-        ufilt = self.db.user.filter
-        ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'),
-            ('+','supervisor.supervisor'), ('+','supervisor'),
-            ('+','username')]),
-            ['1', '3', '2', '4', '5', '6', '7', '8', '9', '10'])
-        ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'),
-            ('-','supervisor.supervisor'), ('-','supervisor'),
-            ('+','username')]),
-            ['8', '9', '10', '6', '7', '4', '5', '1', '3', '2'])
-        ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
-            ('+','assignedto.supervisor.supervisor'),
-            ('+','assignedto.supervisor'), ('+','assignedto')]),
-            ['1', '2', '3', '4', '5', '6', '7', '8'])
-        ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
-            ('+','assignedto.supervisor.supervisor'),
-            ('-','assignedto.supervisor'), ('+','assignedto')]),
-            ['4', '5', '6', '7', '8', '1', '2', '3'])
+        ae, filter, filter_iter = self.filteringSetupTransitiveSearch()
+        ae, ufilter, ufilter_iter = self.iterSetup('user')
+        for ufilt in ufilter, ufilter_iter:
+            ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'),
+                ('+','supervisor.supervisor'), ('+','supervisor'),
+                ('+','username')]),
+                ['1', '3', '2', '4', '5', '6', '7', '8', '9', '10'])
+            ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'),
+                ('-','supervisor.supervisor'), ('-','supervisor'),
+                ('+','username')]),
+                ['8', '9', '10', '6', '7', '4', '5', '1', '3', '2'])
+        for f in filter, filter_iter:
+            ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
+                ('+','assignedto.supervisor.supervisor'),
+                ('+','assignedto.supervisor'), ('+','assignedto')]),
+                ['1', '2', '3', '4', '5', '6', '7', '8'])
+            ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
+                ('+','assignedto.supervisor.supervisor'),
+                ('-','assignedto.supervisor'), ('+','assignedto')]),
+                ['4', '5', '6', '7', '8', '1', '2', '3'])
 
     def testFilteringTransitiveLinkIssue(self):
-        ae, filt = self.filteringSetupTransitiveSearch()
-        ae(filt(None, {'assignedto.supervisor.username': 'grouplead1'},
-            ('+','id')), ['1', '2', '3'])
-        ae(filt(None, {'assignedto.supervisor.username': 'grouplead2'},
-            ('+','id')), ['4', '5', '6', '7', '8'])
-        ae(filt(None, {'assignedto.supervisor.username': 'grouplead2',
-                       'status': '1'}, ('+','id')), ['4', '6', '8'])
-        ae(filt(None, {'assignedto.supervisor.username': 'grouplead2',
-                       'status': '2'}, ('+','id')), ['5', '7'])
-        ae(filt(None, {'assignedto.supervisor.username': ['grouplead2'],
-                       'status': '2'}, ('+','id')), ['5', '7'])
-        ae(filt(None, {'assignedto.supervisor': ['4', '5'], 'status': '2'},
-            ('+','id')), ['1', '3', '5', '7'])
+        ae, filter, filter_iter = self.filteringSetupTransitiveSearch()
+        for filt in filter, filter_iter:
+            ae(filt(None, {'assignedto.supervisor.username': 'grouplead1'},
+                ('+','id')), ['1', '2', '3'])
+            ae(filt(None, {'assignedto.supervisor.username': 'grouplead2'},
+                ('+','id')), ['4', '5', '6', '7', '8'])
+            ae(filt(None, {'assignedto.supervisor.username': 'grouplead2',
+                           'status': '1'}, ('+','id')), ['4', '6', '8'])
+            ae(filt(None, {'assignedto.supervisor.username': 'grouplead2',
+                           'status': '2'}, ('+','id')), ['5', '7'])
+            ae(filt(None, {'assignedto.supervisor.username': ['grouplead2'],
+                           'status': '2'}, ('+','id')), ['5', '7'])
+            ae(filt(None, {'assignedto.supervisor': ['4', '5'], 'status': '2'},
+                ('+','id')), ['1', '3', '5', '7'])
 
     def testFilteringTransitiveMultilink(self):
-        ae, filt = self.filteringSetupTransitiveSearch()
-        ae(filt(None, {'messages.author.username': 'grouplead1'},
-            ('+','id')), [])
-        ae(filt(None, {'messages.author': '6'},
-            ('+','id')), ['1', '2'])
-        ae(filt(None, {'messages.author.id': '6'},
-            ('+','id')), ['1', '2'])
-        ae(filt(None, {'messages.author.username': 'worker1'},
-            ('+','id')), ['1', '2'])
-        ae(filt(None, {'messages.author': '10'},
-            ('+','id')), ['6', '7', '8'])
-        ae(filt(None, {'messages.author': '9'},
-            ('+','id')), ['5', '8'])
-        ae(filt(None, {'messages.author': ['9', '10']},
-            ('+','id')), ['5', '6', '7', '8'])
-        ae(filt(None, {'messages.author': ['8', '9']},
-            ('+','id')), ['4', '5', '8'])
-        ae(filt(None, {'messages.author': ['8', '9'], 'status' : '1'},
-            ('+','id')), ['4', '8'])
-        ae(filt(None, {'messages.author': ['8', '9'], 'status' : '2'},
-            ('+','id')), ['5'])
-        ae(filt(None, {'messages.author': ['8', '9', '10'],
-            'messages.date': '2006-01-22.21:00;2006-01-23'}, ('+','id')),
-            ['6', '7', '8'])
-        ae(filt(None, {'nosy.supervisor.username': 'ceo'},
-            ('+','id')), ['1', '2'])
-        ae(filt(None, {'messages.author': ['6', '9']},
-            ('+','id')), ['1', '2', '5', '8'])
-        ae(filt(None, {'messages': ['5', '7']},
-            ('+','id')), ['3', '5', '8'])
-        ae(filt(None, {'messages.author': ['6', '9'], 'messages': ['5', '7']},
-            ('+','id')), ['5', '8'])
+        ae, filter, filter_iter = self.filteringSetupTransitiveSearch()
+        for filt in filter, filter_iter:
+            ae(filt(None, {'messages.author.username': 'grouplead1'},
+                ('+','id')), [])
+            ae(filt(None, {'messages.author': '6'},
+                ('+','id')), ['1', '2'])
+            ae(filt(None, {'messages.author.id': '6'},
+                ('+','id')), ['1', '2'])
+            ae(filt(None, {'messages.author.username': 'worker1'},
+                ('+','id')), ['1', '2'])
+            ae(filt(None, {'messages.author': '10'},
+                ('+','id')), ['6', '7', '8'])
+            ae(filt(None, {'messages.author': '9'},
+                ('+','id')), ['5', '8'])
+            ae(filt(None, {'messages.author': ['9', '10']},
+                ('+','id')), ['5', '6', '7', '8'])
+            ae(filt(None, {'messages.author': ['8', '9']},
+                ('+','id')), ['4', '5', '8'])
+            ae(filt(None, {'messages.author': ['8', '9'], 'status' : '1'},
+                ('+','id')), ['4', '8'])
+            ae(filt(None, {'messages.author': ['8', '9'], 'status' : '2'},
+                ('+','id')), ['5'])
+            ae(filt(None, {'messages.author': ['8', '9', '10'],
+                'messages.date': '2006-01-22.21:00;2006-01-23'}, ('+','id')),
+                ['6', '7', '8'])
+            ae(filt(None, {'nosy.supervisor.username': 'ceo'},
+                ('+','id')), ['1', '2'])
+            ae(filt(None, {'messages.author': ['6', '9']},
+                ('+','id')), ['1', '2', '5', '8'])
+            ae(filt(None, {'messages': ['5', '7']},
+                ('+','id')), ['3', '5', '8'])
+            ae(filt(None, {'messages.author': ['6', '9'],
+                'messages': ['5', '7']}, ('+','id')), ['5', '8'])
 
     def testFilteringTransitiveMultilinkSort(self):
-        ae, filt = self.filteringSetupTransitiveSearch()
+        # Note that we don't test filter_iter here, Multilink sort-order
+        # isn't defined for that.
+        ae, filt, dummy = self.filteringSetupTransitiveSearch()
         ae(filt(None, {}, [('+','messages.author')]),
             ['1', '2', '3', '4', '5', '8', '6', '7'])
         ae(filt(None, {}, [('-','messages.author')]),
@@ -1607,9 +1674,10 @@
             ['3', '1', '2', '6', '7', '5', '4', '8'])
 
     def testFilteringSortId(self):
-        ae, filt = self.filteringSetupTransitiveSearch()
-        ae(self.db.user.filter(None, {}, ('+','id')),
-            ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10'])
+        ae, filter, filter_iter = self.filteringSetupTransitiveSearch('user')
+        for filt in filter, filter_iter:
+            ae(filt(None, {}, ('+','id')),
+                ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10'])
 
 # XXX add sorting tests for other types
 
@@ -1624,10 +1692,9 @@
         self.db = self.module.Database(config, 'admin')
         setupSchema(self.db, 0, self.module)
 
-
     def testImportExport(self):
         # use the filtering setup to create a bunch of items
-        ae, filt = self.filteringSetup()
+        ae, dummy1, dummy2 = self.filteringSetup()
         # Get some stuff into the journal for testing import/export of
         # journal data:
         self.db.user.set('4', password = password.Password('xyzzy'))
@@ -1740,7 +1807,7 @@
         import roundup.admin
         import csv
         # use the filtering setup to create a bunch of items
-        ae, filt = self.filteringSetup()
+        ae, dummy1, dummy2 = self.filteringSetup()
         # create large field
         self.db.priority.create(name = 'X' * 500)
         self.db.config.CSV_FIELD_SIZE = 400
@@ -1835,6 +1902,8 @@
         """Creates one issue with two attachments, one smaller and one larger
            than the set max_attachment_size.
         """
+        old_translate_ = roundupdb._
+        roundupdb._ = i18n.get_translation(language='C').gettext
         db = self.db
         db.config.NOSY_MAX_ATTACHMENT_SIZE = 4096
         res = dict(mail_to = None, mail_msg = None)
@@ -1852,15 +1921,16 @@
             db.issue.nosymessage(i, m, {})
             mail_msg = str(res["mail_msg"])
             self.assertEqual(res["mail_to"], ["fred at example.com"])
-            self.failUnless("From: admin" in mail_msg)
-            self.failUnless("Subject: [issue1] spam" in mail_msg)
-            self.failUnless("New submission from admin" in mail_msg)
-            self.failUnless("one two" in mail_msg)
-            self.failIf("File 'test1.txt' not attached" in mail_msg)
-            self.failUnless(base64.encodestring("xxx").rstrip() in mail_msg)
-            self.failUnless("File 'test2.txt' not attached" in mail_msg)
-            self.failIf(base64.encodestring("yyy").rstrip() in mail_msg)
+            self.assert_("From: admin" in mail_msg)
+            self.assert_("Subject: [issue1] spam" in mail_msg)
+            self.assert_("New submission from admin" in mail_msg)
+            self.assert_("one two" in mail_msg)
+            self.assert_("File 'test1.txt' not attached" not in mail_msg)
+            self.assert_(base64.encodestring("xxx").rstrip() in mail_msg)
+            self.assert_("File 'test2.txt' not attached" in mail_msg)
+            self.assert_(base64.encodestring("yyy").rstrip() not in mail_msg)
         finally :
+            roundupdb._ = old_translate_
             Mailer.smtp_send = backup
 
 class ROTest(MyTestCase):
@@ -1894,7 +1964,7 @@
         os.makedirs(config.DATABASE + '/files')
 
     def test_reservedProperties(self):
-        self.db = self.module.Database(config, 'admin')
+        self.open_database()
         self.assertRaises(ValueError, self.module.Class, self.db, "a",
             creation=String())
         self.assertRaises(ValueError, self.module.Class, self.db, "a",
@@ -1905,13 +1975,13 @@
             actor=String())
 
     def init_a(self):
-        self.db = self.module.Database(config, 'admin')
+        self.open_database()
         a = self.module.Class(self.db, "a", name=String())
         a.setkey("name")
         self.db.post_init()
 
     def test_fileClassProps(self):
-        self.db = self.module.Database(config, 'admin')
+        self.open_database()
         a = self.module.FileClass(self.db, 'a')
         l = a.getprops().keys()
         l.sort()
@@ -1919,7 +1989,7 @@
             'creation', 'type'])
 
     def init_ab(self):
-        self.db = self.module.Database(config, 'admin')
+        self.open_database()
         a = self.module.Class(self.db, "a", name=String())
         a.setkey("name")
         b = self.module.Class(self.db, "b", name=String(),
@@ -1957,7 +2027,7 @@
         self.db.getjournal('b', bid)
 
     def init_amod(self):
-        self.db = self.module.Database(config, 'admin')
+        self.open_database()
         a = self.module.Class(self.db, "a", name=String(), newstr=String(),
             newint=Interval(), newnum=Number(), newbool=Boolean(),
             newdate=Date())
@@ -2001,7 +2071,7 @@
         self.db.getjournal('a', aid2)
 
     def init_amodkey(self):
-        self.db = self.module.Database(config, 'admin')
+        self.open_database()
         a = self.module.Class(self.db, "a", name=String(), newstr=String())
         a.setkey("newstr")
         b = self.module.Class(self.db, "b", name=String())
@@ -2044,7 +2114,7 @@
 
 
     def init_amodml(self):
-        self.db = self.module.Database(config, 'admin')
+        self.open_database()
         a = self.module.Class(self.db, "a", name=String(),
             newml=Multilink('a'))
         a.setkey('name')
@@ -2118,6 +2188,123 @@
         self.assertEqual(self.db.sql_index_exists('_issue', '_issue_id_idx'), 1)
         self.assertEqual(self.db.sql_index_exists('_issue', '_issue_x_idx'), 0)
 
+class FilterCacheTest(commonDBTest):
+    def testFilteringTransitiveLinkCache(self):
+        """Check that filter_iter pre-populates the node cache.
+
+        While iterating the ids yielded by filter_iter, every returned
+        node -- and each node reachable along the transitive sort links
+        (supervisor / assignedto chains) -- must already sit in
+        db.cache with fully correct property values.
+        """
+        ae, filter, filter_iter = self.filteringSetupTransitiveSearch()
+        ae, ufilter, ufilter_iter = self.iterSetup('user')
+        # Need to make ceo his own (and first two users') supervisor
+        self.db.user.set('1', supervisor = '3')
+        self.db.user.set('2', supervisor = '3')
+        self.db.user.set('3', supervisor = '3')
+        # test bool value
+        self.db.user.set('4', assignable = True)
+        self.db.user.set('3', assignable = False)
+        filt = self.db.issue.filter_iter
+        ufilt = self.db.user.filter_iter
+        # Expected complete property snapshot for every user node; the
+        # cached nodes must match these values exactly.
+        user_result = \
+            {  '1' : {'username': 'admin', 'assignable': None,
+                      'supervisor': '3', 'realname': None, 'roles': 'Admin',
+                      'creator': '1', 'age': None, 'actor': '1',
+                      'address': None}
+            ,  '2' : {'username': 'fred', 'assignable': None,
+                      'supervisor': '3', 'realname': None, 'roles': 'User',
+                      'creator': '1', 'age': None, 'actor': '1',
+                      'address': 'fred at example.com'}
+            ,  '3' : {'username': 'ceo', 'assignable': False,
+                      'supervisor': '3', 'realname': None, 'roles': None,
+                      'creator': '1', 'age': 129.0, 'actor': '1',
+                      'address': None}
+            ,  '4' : {'username': 'grouplead1', 'assignable': True,
+                      'supervisor': '3', 'realname': None, 'roles': None,
+                      'creator': '1', 'age': 29.0, 'actor': '1',
+                      'address': None}
+            ,  '5' : {'username': 'grouplead2', 'assignable': None,
+                      'supervisor': '3', 'realname': None, 'roles': None,
+                      'creator': '1', 'age': 29.0, 'actor': '1',
+                      'address': None}
+            ,  '6' : {'username': 'worker1', 'assignable': None,
+                      'supervisor': '4', 'realname': None, 'roles': None,
+                      'creator': '1', 'age': 25.0, 'actor': '1',
+                      'address': None}
+            ,  '7' : {'username': 'worker2', 'assignable': None,
+                      'supervisor': '4', 'realname': None, 'roles': None,
+                      'creator': '1', 'age': 24.0, 'actor': '1',
+                      'address': None}
+            ,  '8' : {'username': 'worker3', 'assignable': None,
+                      'supervisor': '5', 'realname': None, 'roles': None,
+                      'creator': '1', 'age': 23.0, 'actor': '1',
+                      'address': None}
+            ,  '9' : {'username': 'worker4', 'assignable': None,
+                      'supervisor': '5', 'realname': None, 'roles': None,
+                      'creator': '1', 'age': 22.0, 'actor': '1',
+                      'address': None}
+            , '10' : {'username': 'worker5', 'assignable': None,
+                      'supervisor': '5', 'realname': None, 'roles': None,
+                      'creator': '1', 'age': 21.0, 'actor': '1',
+                      'address': None}
+            }
+        foo = date.Interval('-1d')
+        # Expected property snapshot for the issues returned by filt().
+        issue_result = \
+            { '1' : {'title': 'ts1', 'status': '2', 'assignedto': '6',
+                     'priority': '3', 'messages' : ['4'], 'nosy' : ['4']}
+            , '2' : {'title': 'ts2', 'status': '1', 'assignedto': '6',
+                     'priority': '3', 'messages' : ['4'], 'nosy' : ['5']}
+            , '3' : {'title': 'ts4', 'status': '2', 'assignedto': '7',
+                     'priority': '3', 'messages' : ['5']}
+            , '4' : {'title': 'ts5', 'status': '1', 'assignedto': '8',
+                     'priority': '3', 'messages' : ['6']}
+            , '5' : {'title': 'ts6', 'status': '2', 'assignedto': '9',
+                     'priority': '3', 'messages' : ['7']}
+            , '6' : {'title': 'ts7', 'status': '1', 'assignedto': '10',
+                     'priority': '3', 'messages' : ['8'], 'foo' : None}
+            , '7' : {'title': 'ts8', 'status': '2', 'assignedto': '10',
+                     'priority': '3', 'messages' : ['8'], 'foo' : foo}
+            , '8' : {'title': 'ts9', 'status': '1', 'assignedto': '10',
+                     'priority': '3', 'messages' : ['7', '8']}
+            }
+        result = []
+        self.db.clearCache()
+        # Walk each yielded user plus three supervisor hops; all four
+        # nodes must be in the cache before we touch them.
+        for id in ufilt(None, {}, [('+','supervisor.supervisor.supervisor'),
+            ('-','supervisor.supervisor'), ('-','supervisor'),
+            ('+','username')]):
+            result.append(id)
+            nodeid = id
+            for x in range(4):
+                assert(('user', nodeid) in self.db.cache)
+                n = self.db.user.getnode(nodeid)
+                for k, v in user_result[nodeid].iteritems():
+                    ae((k, n[k]), (k, v))
+                for k in 'creation', 'activity':
+                    assert(n[k])
+                nodeid = n.supervisor
+            self.db.clearCache()
+        ae (result, ['8', '9', '10', '6', '7', '1', '3', '2', '4', '5'])
+
+        result = []
+        self.db.clearCache()
+        # Same check for issues: the issue itself plus the assignedto
+        # user chain must all be cached by filter_iter.
+        for id in filt(None, {},
+            [('+','assignedto.supervisor.supervisor.supervisor'),
+            ('+','assignedto.supervisor.supervisor'),
+            ('-','assignedto.supervisor'), ('+','assignedto')]):
+            result.append(id)
+            assert(('issue', id) in self.db.cache)
+            n = self.db.issue.getnode(id)
+            for k, v in issue_result[id].iteritems():
+                ae((k, n[k]), (k, v))
+            for k in 'creation', 'activity':
+                assert(n[k])
+            nodeid = n.assignedto
+            for x in range(4):
+                assert(('user', nodeid) in self.db.cache)
+                n = self.db.user.getnode(nodeid)
+                for k, v in user_result[nodeid].iteritems():
+                    ae((k, n[k]), (k, v))
+                for k in 'creation', 'activity':
+                    assert(n[k])
+                nodeid = n.supervisor
+            self.db.clearCache()
+        ae (result, ['4', '5', '6', '7', '8', '1', '2', '3'])
+
 
 class ClassicInitTest(unittest.TestCase):
     count = 0
@@ -2166,4 +2353,36 @@
         except OSError, error:
             if error.errno not in (errno.ENOENT, errno.ESRCH): raise
 
+class ConcurrentDBTest(ClassicInitTest):
+    def testConcurrency(self):
+        """Two open connections must not clobber each other via caching."""
+        # The idea here is a read-modify-update cycle in the presence of
+        # a cache that has to be properly handled. The same applies if
+        # we extend a String or otherwise modify something that depends
+        # on the previous value.
+
+        # set up and open a tracker
+        tracker = setupTracker(self.dirname, self.backend)
+        # open the database
+        self.db = tracker.open('admin')
+
+        prio = '1'
+        self.assertEqual(self.db.priority.get(prio, 'order'), 1.0)
+        # read-modify-write helper on the given connection
+        def inc(db):
+            db.priority.set(prio, order=db.priority.get(prio, 'order') + 1)
+
+        inc(self.db)
+
+        # second connection opened before the first commits still sees
+        # the original value
+        db2 = tracker.open("admin")
+        self.assertEqual(db2.priority.get(prio, 'order'), 1.0)
+        db2.commit()
+        self.db.commit()
+        self.assertEqual(self.db.priority.get(prio, 'order'), 2.0)
+
+        # after the second connection increments and drops its cache, it
+        # must observe both increments
+        inc(db2)
+        db2.commit()
+        db2.clearCache()
+        self.assertEqual(db2.priority.get(prio, 'order'), 3.0)
+        db2.close()
+
+
 # vim: set et sts=4 sw=4 :

Added: tracker/roundup-src/test/memorydb.py
==============================================================================
--- (empty file)
+++ tracker/roundup-src/test/memorydb.py	Thu Aug  4 15:46:52 2011
@@ -0,0 +1,434 @@
+# $Id: memorydb.py,v 1.4 2004-11-03 01:34:21 richard Exp $
+'''Implement an in-memory hyperdb for testing purposes.
+'''
+
+import shutil
+
+from roundup import hyperdb
+from roundup import roundupdb
+from roundup import security
+from roundup import password
+from roundup import configuration
+from roundup.backends import back_anydbm
+from roundup.backends import indexer_dbm
+from roundup.backends import sessions_dbm
+from roundup.backends import indexer_common
+from roundup.hyperdb import *
+from roundup.support import ensureParentsExist
+
+def new_config(debug=False):
+    """Return a fresh CoreConfig for the in-memory test database.
+
+    DATABASE points at the 'db' directory; when *debug* is true the
+    logging level is raised to DEBUG.
+    """
+    config = configuration.CoreConfig()
+    config.DATABASE = "db"
+    #config.logging = MockNull()
+    # these TRACKER_WEB and MAIL_DOMAIN values are used in mailgw tests
+    if debug:
+        config.LOGGING_LEVEL = "DEBUG"
+    config.MAIL_DOMAIN = "your.tracker.email.domain.example"
+    config.TRACKER_WEB = "http://tracker.example/cgi-bin/roundup.cgi/bugs/"
+    return config
+
+def create(journaltag, create=True, debug=False):
+    """Build and return a memorydb Database loaded with the classic schema.
+
+    The schema, initial data and detectors of the shipped 'classic'
+    template are executed against a fresh Database.  If *create* is
+    true an extra 'fred' test user is added as well.
+    """
+    db = Database(new_config(debug), journaltag)
+
+    # load standard schema
+    schema = os.path.join(os.path.dirname(__file__),
+        '../share/roundup/templates/classic/schema.py')
+    vars = dict(globals())
+    vars['db'] = db
+    execfile(schema, vars)
+    initial_data = os.path.join(os.path.dirname(__file__),
+        '../share/roundup/templates/classic/initial_data.py')
+    vars = dict(db=db, admin_email='admin at test.com',
+        adminpw=password.Password('sekrit'))
+    execfile(initial_data, vars)
+
+    # load standard detectors
+    dirname = os.path.join(os.path.dirname(__file__),
+        '../share/roundup/templates/classic/detectors')
+    for fn in os.listdir(dirname):
+        if not fn.endswith('.py'): continue
+        vars = {}
+        execfile(os.path.join(dirname, fn), vars)
+        vars['init'](db)
+
+    # NOTE: the following string literal is dead code -- a hand-built
+    # schema kept for reference; the executed schema comes from the
+    # classic template above.
+    '''
+    status = Class(db, "status", name=String())
+    status.setkey("name")
+    priority = Class(db, "priority", name=String(), order=String())
+    priority.setkey("name")
+    keyword = Class(db, "keyword", name=String(), order=String())
+    keyword.setkey("name")
+    user = Class(db, "user", username=String(), password=Password(),
+        assignable=Boolean(), age=Number(), roles=String(), address=String(),
+        supervisor=Link('user'),realname=String(),alternate_addresses=String())
+    user.setkey("username")
+    file = FileClass(db, "file", name=String(), type=String(),
+        comment=String(indexme="yes"), fooz=Password())
+    file_nidx = FileClass(db, "file_nidx", content=String(indexme='no'))
+    issue = IssueClass(db, "issue", title=String(indexme="yes"),
+        status=Link("status"), nosy=Multilink("user"), deadline=Date(),
+        foo=Interval(), files=Multilink("file"), assignedto=Link('user'),
+        priority=Link('priority'), spam=Multilink('msg'),
+        feedback=Link('msg'))
+    stuff = Class(db, "stuff", stuff=String())
+    session = Class(db, 'session', title=String())
+    msg = FileClass(db, "msg", date=Date(),
+                           author=Link("user", do_journal='no'),
+                           files=Multilink('file'), inreplyto=String(),
+                           messageid=String(), summary=String(),
+                           content=String(),
+                           recipients=Multilink("user", do_journal='no')
+                           )
+    '''
+    if create:
+        db.user.create(username="fred", roles='User',
+            password=password.Password('sekrit'), address='fred at example.com')
+
+    db.security.addPermissionToRole('User', 'Email Access')
+    # NOTE: more dead reference code below -- the classic template's
+    # initial data already grants these permissions.
+    '''
+    db.security.addPermission(name='Register', klass='user')
+    db.security.addPermissionToRole('User', 'Web Access')
+    db.security.addPermissionToRole('Anonymous', 'Email Access')
+    db.security.addPermissionToRole('Anonymous', 'Register', 'user')
+    for cl in 'issue', 'file', 'msg', 'keyword':
+        db.security.addPermissionToRole('User', 'View', cl)
+        db.security.addPermissionToRole('User', 'Edit', cl)
+        db.security.addPermissionToRole('User', 'Create', cl)
+    for cl in 'priority', 'status':
+        db.security.addPermissionToRole('User', 'View', cl)
+    '''
+    return db
+
+class cldb(dict):
+    """Per-class in-memory item store; close() is a no-op."""
+    def close(self):
+        pass
+
+class BasicDatabase(dict):
+    ''' Provide a nice encapsulation of an anydbm store.
+
+        Keys are id strings, values are automatically marshalled data.
+    '''
+    def __getitem__(self, key):
+        # auto-vivify: reading a missing id creates an empty record
+        if key not in self:
+            d = self[key] = {}
+            return d
+        return super(BasicDatabase, self).__getitem__(key)
+    def exists(self, infoid):
+        return infoid in self
+    # NB: signature differs from dict.get -- 'value' is the field name
+    # inside the record, not a fallback.
+    def get(self, infoid, value, default=None):
+        return self[infoid].get(value, default)
+    def getall(self, infoid):
+        # unlike __getitem__, a missing id raises rather than vivifying
+        if infoid not in self:
+            raise KeyError(infoid)
+        return self[infoid]
+    def set(self, infoid, **newvalues):
+        self[infoid].update(newvalues)
+    def list(self):
+        return self.keys()
+    def destroy(self, infoid):
+        del self[infoid]
+    # the remaining methods are persistence hooks that have no effect
+    # for a purely in-memory store
+    def commit(self):
+        pass
+    def close(self):
+        pass
+    def updateTimestamp(self, sessid):
+        pass
+    def clean(self):
+        pass
+
+class Sessions(BasicDatabase, sessions_dbm.Sessions):
+    """In-memory session store."""
+    name = 'sessions'
+
+class OneTimeKeys(BasicDatabase, sessions_dbm.Sessions):
+    """In-memory one-time-key store.
+
+    NOTE(review): derives from sessions_dbm.Sessions rather than a
+    OneTimeKeys base -- presumably only the BasicDatabase behaviour
+    matters here; confirm against sessions_dbm.
+    """
+    name = 'otks'
+
+class Indexer(indexer_dbm.Indexer):
+    """In-memory full-text indexer: never loads from or saves to disk."""
+    def __init__(self, db):
+        # bypass indexer_dbm.Indexer.__init__ (which touches disk) and
+        # initialise the common state directly
+        indexer_common.Indexer.__init__(self, db)
+        self.reindex = 0
+        self.quiet = 9
+        self.changed = 0
+
+    def load_index(self, reload=0, wordlist=None):
+        # Unless reload is indicated, do not load twice
+        if self.index_loaded() and not reload:
+            return 0
+        # start from empty in-memory structures instead of reading files
+        self.words = {}
+        self.files = {'_TOP':(0,None)}
+        self.fileids = {}
+        self.changed = 0
+
+    def save_index(self):
+        # nothing to persist for the in-memory indexer
+        pass
+    def force_reindex(self):
+        # TODO I'm concerned that force_reindex may not be tested by
+        # testForcedReindexing if the functionality can just be removed
+        pass
+
+class Database(back_anydbm.Database):
+    """A database for storing records containing flexible data types.
+
+    Transaction stuff TODO:
+
+    - check the timestamp of the class file and nuke the cache if it's
+      modified. Do some sort of conflict checking on the dirty stuff.
+    - perhaps detect write collisions (related to above)?
+    """
+    def __init__(self, config, journaltag=None):
+        """Set up a completely in-memory database -- no files on disk."""
+        self.config, self.journaltag = config, journaltag
+        # per-class schema objects, item stores, id counters, journals
+        self.classes = {}
+        self.items = {}
+        self.ids = {}
+        self.journals = {}
+        # committed file content and per-transaction pending content
+        self.files = {}
+        self.tx_files = {}
+        self.security = security.Security(self)
+        self.stats = {'cache_hits': 0, 'cache_misses': 0, 'get_items': 0,
+            'filtering': 0}
+        self.sessions = Sessions()
+        self.otks = OneTimeKeys()
+        self.indexer = Indexer(self)
+
+        # anydbm bits
+        self.cache = {}         # cache of nodes loaded or created
+        self.dirtynodes = {}    # keep track of the dirty nodes by class
+        self.newnodes = {}      # keep track of the new nodes by class
+        self.destroyednodes = {}# keep track of the destroyed nodes by class
+        self.transactions = []
+
+    def filename(self, classname, nodeid, property=None, create=0):
+        """Return an existing, readable path standing in for item storage.
+
+        There is no real file storage; a throwaway copy of this module
+        is made so callers always get a valid on-disk path.
+        """
+        shutil.copyfile(__file__, __file__+'.dummy')
+        return __file__+'.dummy'
+
+    def filesize(self, classname, nodeid, property=None, create=0):
+        """Size of the stored content, taken from the in-memory store."""
+        return len(self.getfile(classname, nodeid, property))
+
+    def post_init(self):
+        """No schema migration needed for the in-memory backend."""
+        pass
+
+    def refresh_database(self):
+        """Nothing to refresh -- there is no on-disk representation."""
+        pass
+
+    def getSessionManager(self):
+        """Return the in-memory Sessions store."""
+        return self.sessions
+
+    def getOTKManager(self):
+        """Return the in-memory one-time-key store."""
+        return self.otks
+
+    def reindex(self, classname=None, show_progress=False):
+        """Reindexing is a no-op for the in-memory indexer."""
+        pass
+
+    def __repr__(self):
+        """Identify the instance by address for debugging output."""
+        return '<memorydb instance at %x>'%id(self)
+
+    def storefile(self, classname, nodeid, property, content):
+        """Buffer file content; doStoreFile commits it at transaction end."""
+        self.tx_files[classname, nodeid, property] = content
+        self.transactions.append((self.doStoreFile, (classname, nodeid,
+            property)))
+
+    def getfile(self, classname, nodeid, property):
+        if (classname, nodeid, property) in self.tx_files:
+            return self.tx_files[classname, nodeid, property]
+        return self.files[classname, nodeid, property]
+
+    def doStoreFile(self, classname, nodeid, property, **databases):
+        self.files[classname, nodeid, property] = self.tx_files[classname, nodeid, property]
+        return (classname, nodeid)
+
+    def rollbackStoreFile(self, classname, nodeid, property, **databases):
+        del self.tx_files[classname, nodeid, property]
+
+    def numfiles(self):
+        return len(self.files) + len(self.tx_files)
+
+    def close(self):
+        self.clearCache()
+        self.tx_files = {}
+        # kill the schema too
+        self.classes = {}
+        # just keep the .items
+
+    #
+    # Classes
+    #
+    def __getattr__(self, classname):
+        """A convenient way of calling self.getclass(classname)."""
+        if self.classes.has_key(classname):
+            return self.classes[classname]
+        raise AttributeError, classname
+
+    def addclass(self, cl):
+        cn = cl.classname
+        if self.classes.has_key(cn):
+            raise ValueError, cn
+        self.classes[cn] = cl
+        if cn not in self.items:
+            self.items[cn] = cldb()
+            self.ids[cn] = 0
+
+        # add default Edit and View permissions
+        self.security.addPermission(name="Create", klass=cn,
+            description="User is allowed to create "+cn)
+        self.security.addPermission(name="Edit", klass=cn,
+            description="User is allowed to edit "+cn)
+        self.security.addPermission(name="View", klass=cn,
+            description="User is allowed to access "+cn)
+
+    def getclasses(self):
+        """Return a list of the names of all existing classes."""
+        l = self.classes.keys()
+        l.sort()
+        return l
+
+    def getclass(self, classname):
+        """Get the Class object representing a particular class.
+
+        If 'classname' is not a valid class name, a KeyError is raised.
+        """
+        try:
+            return self.classes[classname]
+        except KeyError:
+            raise KeyError, 'There is no class called "%s"'%classname
+
+    #
+    # Class DBs
+    #
+    def clear(self):
+        self.items = {}
+
+    def getclassdb(self, classname, mode='r'):
+        """ grab a connection to the class db that will be used for
+            multiple actions
+        """
+        return self.items[classname]
+
+    def getCachedJournalDB(self, classname):
+        return self.journals.setdefault(classname, {})
+
+    #
+    # Node IDs
+    #
+    def newid(self, classname):
+        self.ids[classname] += 1
+        return str(self.ids[classname])
+    def setid(self, classname, id):
+        self.ids[classname] = int(id)
+
+    #
+    # Journal
+    #
+    def doSaveJournal(self, classname, nodeid, action, params, creator,
+            creation):
+        if creator is None:
+            creator = self.getuid()
+        if creation is None:
+            creation = date.Date()
+        self.journals.setdefault(classname, {}).setdefault(nodeid,
+            []).append((nodeid, creation, creator, action, params))
+
+    def doSetJournal(self, classname, nodeid, journal):
+        self.journals.setdefault(classname, {})[nodeid] = journal
+
+    def getjournal(self, classname, nodeid):
+        # our journal result
+        res = []
+
+        # add any journal entries for transactions not committed to the
+        # database
+        for method, args in self.transactions:
+            if method != self.doSaveJournal:
+                continue
+            (cache_classname, cache_nodeid, cache_action, cache_params,
+                cache_creator, cache_creation) = args
+            if cache_classname == classname and cache_nodeid == nodeid:
+                if not cache_creator:
+                    cache_creator = self.getuid()
+                if not cache_creation:
+                    cache_creation = date.Date()
+                res.append((cache_nodeid, cache_creation, cache_creator,
+                    cache_action, cache_params))
+        try:
+            res += self.journals.get(classname, {})[nodeid]
+        except KeyError:
+            if res: return res
+            raise IndexError, nodeid
+        return res
+
+    def pack(self, pack_before):
+        """ Delete all journal entries except "create" before 'pack_before'.
+        """
+        pack_before = pack_before.serialise()
+        for classname in self.journals:
+            db = self.journals[classname]
+            for key in db:
+                # get the journal for this db entry
+                l = []
+                last_set_entry = None
+                for entry in db[key]:
+                    # unpack the entry
+                    (nodeid, date_stamp, self.journaltag, action,
+                        params) = entry
+                    date_stamp = date_stamp.serialise()
+                    # if the entry is after the pack date, _or_ the initial
+                    # create entry, then it stays
+                    if date_stamp > pack_before or action == 'create':
+                        l.append(entry)
+                db[key] = l
+
+class Class(back_anydbm.Class):
+    pass
+
+class FileClass(back_anydbm.FileClass):
+    def __init__(self, db, classname, **properties):
+        if not properties.has_key('content'):
+            properties['content'] = hyperdb.String(indexme='yes')
+        if not properties.has_key('type'):
+            properties['type'] = hyperdb.String()
+        back_anydbm.Class.__init__(self, db, classname, **properties)
+
+    def export_files(self, dirname, nodeid):
+        dest = self.exportFilename(dirname, nodeid)
+        ensureParentsExist(dest)
+        f = open(dest, 'wb')
+        f.write(self.db.files[self.classname, nodeid, None])
+        f.close()
+
+    def import_files(self, dirname, nodeid):
+        source = self.exportFilename(dirname, nodeid)
+        f = open(source, 'rb')
+        self.db.files[self.classname, nodeid, None] = f.read()
+        f.close()
+        mime_type = None
+        props = self.getprops()
+        if props.has_key('type'):
+            mime_type = self.get(nodeid, 'type')
+        if not mime_type:
+            mime_type = self.default_mime_type
+        if props['content'].indexme:
+            self.db.indexer.add_text((self.classname, nodeid, 'content'),
+                self.get(nodeid, 'content'), mime_type)
+
+# deviation from spec - was called ItemClass
+class IssueClass(Class, roundupdb.IssueClass):
+    # Overridden methods:
+    def __init__(self, db, classname, **properties):
+        """The newly-created class automatically includes the "messages",
+        "files", "nosy", and "superseder" properties.  If the 'properties'
+        dictionary attempts to specify any of these properties or a
+        "creation" or "activity" property, a ValueError is raised.
+        """
+        if not properties.has_key('title'):
+            properties['title'] = hyperdb.String(indexme='yes')
+        if not properties.has_key('messages'):
+            properties['messages'] = hyperdb.Multilink("msg")
+        if not properties.has_key('files'):
+            properties['files'] = hyperdb.Multilink("file")
+        if not properties.has_key('nosy'):
+            # note: journalling is turned off as it really just wastes
+            # space. this behaviour may be overridden in an instance
+            properties['nosy'] = hyperdb.Multilink("user", do_journal="no")
+        if not properties.has_key('superseder'):
+            properties['superseder'] = hyperdb.Multilink(classname)
+        Class.__init__(self, db, classname, **properties)
+
+# vim: set et sts=4 sw=4 :

Modified: tracker/roundup-src/test/session_common.py
==============================================================================
--- tracker/roundup-src/test/session_common.py	(original)
+++ tracker/roundup-src/test/session_common.py	Thu Aug  4 15:46:52 2011
@@ -20,6 +20,23 @@
         if os.path.exists(config.DATABASE):
             shutil.rmtree(config.DATABASE)
 
+    def testList(self):
+        self.sessions.list()
+        self.sessions.set('random_key', text='hello, world!')
+        self.sessions.list()
+
+    def testGetAll(self):
+        self.sessions.set('random_key', text='hello, world!')
+        self.assertEqual(self.sessions.getall('random_key'),
+            {'text': 'hello, world!'})
+
+    def testDestroy(self):
+        self.sessions.set('random_key', text='hello, world!')
+        self.assertEquals(self.sessions.getall('random_key'),
+            {'text': 'hello, world!'})
+        self.sessions.destroy('random_key')
+        self.assertRaises(KeyError, self.sessions.getall, 'random_key')
+
     def testSetSession(self):
         self.sessions.set('random_key', text='hello, world!')
         self.assertEqual(self.sessions.get('random_key', 'text'),

Modified: tracker/roundup-src/test/test_actions.py
==============================================================================
--- tracker/roundup-src/test/test_actions.py	(original)
+++ tracker/roundup-src/test/test_actions.py	Thu Aug  4 15:46:52 2011
@@ -249,6 +249,7 @@
             ({'messages':hyperdb.Multilink('msg')
              ,'content':hyperdb.String()
              ,'files':hyperdb.Multilink('file')
+             ,'msg':hyperdb.Link('msg')
              })
         self.action = EditItemAction(self.client)
 
@@ -298,6 +299,19 @@
             )
         try :
             self.action.handle()
+        except Redirect, msg:
+            pass
+        self.assertEqual(expect, self.result)
+
+    def testLinkNewToExisting(self):
+        expect = [('create',(),{'msg':'1','title':'TEST'})]
+        self.client.db.classes.get = lambda a, b:['23','42']
+        self.client.parsePropsFromForm = lambda: \
+            ( {('issue','-1'):{'title':'TEST'},('msg','1'):{}}
+            , [('issue','-1','msg',[('msg','1')])]
+            )
+        try :
+            self.action.handle()
         except Redirect, msg:
             pass
         self.assertEqual(expect, self.result)

Modified: tracker/roundup-src/test/test_cgi.py
==============================================================================
--- tracker/roundup-src/test/test_cgi.py	(original)
+++ tracker/roundup-src/test/test_cgi.py	Thu Aug  4 15:46:52 2011
@@ -14,7 +14,7 @@
 
 from roundup.cgi import client, actions, exceptions
 from roundup.cgi.exceptions import FormError
-from roundup.cgi.templating import HTMLItem
+from roundup.cgi.templating import HTMLItem, HTMLRequest
 from roundup.cgi.form_parser import FormParser
 from roundup import init, instance, password, hyperdb, date
 
@@ -425,6 +425,44 @@
             ':confirm:password': ''}, 'user', nodeid),
             ({('user', nodeid): {}}, []))
 
+    def testPasswordMigration(self):
+        chef = self.db.user.lookup('Chef')
+        form = dict(__login_name='Chef', __login_password='foo')
+        cl = self._make_client(form)
+        # assume that the "best" algorithm is the first one and doesn't
+        # need migration, all others should be migrated.
+        for scheme in password.Password.deprecated_schemes:
+            pw1 = password.Password('foo', scheme=scheme)
+            self.assertEqual(pw1.needs_migration(), True)
+            self.db.user.set(chef, password=pw1)
+            self.db.commit()
+            actions.LoginAction(cl).handle()
+            pw = self.db.user.get(chef, 'password')
+            self.assertEqual(pw, 'foo')
+            self.assertEqual(pw.needs_migration(), False)
+        pw1 = pw
+        self.assertEqual(pw1.needs_migration(), False)
+        scheme = password.Password.known_schemes[0]
+        self.assertEqual(scheme, pw1.scheme)
+        actions.LoginAction(cl).handle()
+        pw = self.db.user.get(chef, 'password')
+        self.assertEqual(pw, 'foo')
+        self.assertEqual(pw, pw1)
+
+    def testPasswordConfigOption(self):
+        chef = self.db.user.lookup('Chef')
+        form = dict(__login_name='Chef', __login_password='foo')
+        cl = self._make_client(form)
+        self.db.config.PASSWORD_PBKDF2_DEFAULT_ROUNDS = 1000
+        pw1 = password.Password('foo', scheme='crypt')
+        self.assertEqual(pw1.needs_migration(), True)
+        self.db.user.set(chef, password=pw1)
+        self.db.commit()
+        actions.LoginAction(cl).handle()
+        pw = self.db.user.get(chef, 'password')
+        self.assertEqual('PBKDF2', pw.scheme)
+        self.assertEqual(1000, password.pbkdf2_unpack(pw.password)[0])
+
     #
     # Boolean
     #
@@ -616,14 +654,18 @@
     # SECURITY
     #
     # XXX test all default permissions
-    def _make_client(self, form, classname='user', nodeid='1', userid='2'):
+    def _make_client(self, form, classname='user', nodeid='1',
+           userid='2', template='item'):
         cl = client.Client(self.instance, None, {'PATH_INFO':'/',
             'REQUEST_METHOD':'POST'}, makeForm(form))
-        cl.classname = 'user'
-        cl.nodeid = nodeid
+        cl.classname = classname
+        if nodeid is not None:
+            cl.nodeid = nodeid
         cl.db = self.db
         cl.userid = userid
         cl.language = ('en',)
+        cl.error_message = []
+        cl.template = template
         return cl
 
     def testClassPermission(self):
@@ -636,7 +678,8 @@
 
     def testCheckAndPropertyPermission(self):
         self.db.security.permissions = {}
-        def own_record(db, userid, itemid): return userid == itemid
+        def own_record(db, userid, itemid):
+            return userid == itemid
         p = self.db.security.addPermission(name='Edit', klass='user',
             check=own_record, properties=("password", ))
         self.db.security.addPermissionToRole('User', p)
@@ -644,10 +687,231 @@
         cl = self._make_client(dict(username='bob'))
         self.assertRaises(exceptions.Unauthorised,
             actions.EditItemAction(cl).handle)
+        cl = self._make_client(dict(roles='User,Admin'), userid='4', nodeid='4')
+        self.assertRaises(exceptions.Unauthorised,
+            actions.EditItemAction(cl).handle)
+        cl = self._make_client(dict(roles='User,Admin'), userid='4')
+        self.assertRaises(exceptions.Unauthorised,
+            actions.EditItemAction(cl).handle)
+        cl = self._make_client(dict(roles='User,Admin'))
+        self.assertRaises(exceptions.Unauthorised,
+            actions.EditItemAction(cl).handle)
+        # working example, mary may change her pw
+        cl = self._make_client({'password':'ob', '@confirm@password':'ob'},
+            nodeid='4', userid='4')
+        self.assertRaises(exceptions.Redirect,
+            actions.EditItemAction(cl).handle)
+        cl = self._make_client({'password':'bob', '@confirm@password':'bob'})
         self.failUnlessRaises(exceptions.Unauthorised,
             actions.EditItemAction(cl).handle)
 
+    def testCreatePermission(self):
+        # this checks if we properly differentiate between create and
+        # edit permissions
+        self.db.security.permissions = {}
+        self.db.security.addRole(name='UserAdd')
+        # Don't allow roles
+        p = self.db.security.addPermission(name='Create', klass='user',
+            properties=("username", "password", "address",
+            "alternate_address", "realname", "phone", "organisation",
+            "timezone"))
+        self.db.security.addPermissionToRole('UserAdd', p)
+        # Don't allow roles *and* don't allow username
+        p = self.db.security.addPermission(name='Edit', klass='user',
+            properties=("password", "address", "alternate_address",
+            "realname", "phone", "organisation", "timezone"))
+        self.db.security.addPermissionToRole('UserAdd', p)
+        self.db.user.set('4', roles='UserAdd')
+
+        # anonymous may not
+        cl = self._make_client({'username':'new_user', 'password':'secret',
+            '@confirm@password':'secret', 'address':'new_user@bork.bork',
+            'roles':'Admin'}, nodeid=None, userid='2')
+        self.assertRaises(exceptions.Unauthorised,
+            actions.NewItemAction(cl).handle)
+        # Don't allow creating new user with roles
+        cl = self._make_client({'username':'new_user', 'password':'secret',
+            '@confirm@password':'secret', 'address':'new_user@bork.bork',
+            'roles':'Admin'}, nodeid=None, userid='4')
+        self.assertRaises(exceptions.Unauthorised,
+            actions.NewItemAction(cl).handle)
+        self.assertEqual(cl.error_message,[])
+        # this should work
+        cl = self._make_client({'username':'new_user', 'password':'secret',
+            '@confirm@password':'secret', 'address':'new_user@bork.bork'},
+            nodeid=None, userid='4')
+        self.assertRaises(exceptions.Redirect,
+            actions.NewItemAction(cl).handle)
+        self.assertEqual(cl.error_message,[])
+        # don't allow changing (my own) username (in this example)
+        cl = self._make_client(dict(username='new_user42'), userid='4')
+        self.assertRaises(exceptions.Unauthorised,
+            actions.EditItemAction(cl).handle)
+        cl = self._make_client(dict(username='new_user42'), userid='4',
+            nodeid='4')
+        self.assertRaises(exceptions.Unauthorised,
+            actions.EditItemAction(cl).handle)
+        # don't allow changing (my own) roles
+        cl = self._make_client(dict(roles='User,Admin'), userid='4',
+            nodeid='4')
+        self.assertRaises(exceptions.Unauthorised,
+            actions.EditItemAction(cl).handle)
+        cl = self._make_client(dict(roles='User,Admin'), userid='4')
+        self.assertRaises(exceptions.Unauthorised,
+            actions.EditItemAction(cl).handle)
+        cl = self._make_client(dict(roles='User,Admin'))
+        self.assertRaises(exceptions.Unauthorised,
+            actions.EditItemAction(cl).handle)
+
+    def testSearchPermission(self):
+        # this checks if we properly check for search permissions
+        self.db.security.permissions = {}
+        self.db.security.addRole(name='User')
+        self.db.security.addRole(name='Project')
+        self.db.security.addPermissionToRole('User', 'Web Access')
+        self.db.security.addPermissionToRole('Project', 'Web Access')
+        # Allow viewing department
+        p = self.db.security.addPermission(name='View', klass='department')
+        self.db.security.addPermissionToRole('User', p)
+        # Allow viewing interesting things (but not department) on iss
+        # But users might only view issues where they are on nosy
+        # (so in the real world the check method would be better)
+        p = self.db.security.addPermission(name='View', klass='iss',
+            properties=("title", "status"), check=lambda x,y,z: True)
+        self.db.security.addPermissionToRole('User', p)
+        # Allow all relevant roles access to stat
+        p = self.db.security.addPermission(name='View', klass='stat')
+        self.db.security.addPermissionToRole('User', p)
+        self.db.security.addPermissionToRole('Project', p)
+        # Allow role "Project" access to whole iss
+        p = self.db.security.addPermission(name='View', klass='iss')
+        self.db.security.addPermissionToRole('Project', p)
+
+        department = self.instance.backend.Class(self.db, "department",
+            name=hyperdb.String())
+        status = self.instance.backend.Class(self.db, "stat",
+            name=hyperdb.String())
+        issue = self.instance.backend.Class(self.db, "iss",
+            title=hyperdb.String(), status=hyperdb.Link('stat'),
+            department=hyperdb.Link('department'))
+
+        d1 = department.create(name='d1')
+        d2 = department.create(name='d2')
+        open = status.create(name='open')
+        closed = status.create(name='closed')
+        issue.create(title='i1', status=open, department=d2)
+        issue.create(title='i2', status=open, department=d1)
+        issue.create(title='i2', status=closed, department=d1)
+
+        chef = self.db.user.lookup('Chef')
+        mary = self.db.user.lookup('mary')
+        self.db.user.set(chef, roles = 'User, Project')
+
+        perm = self.db.security.hasPermission
+        search = self.db.security.hasSearchPermission
+        self.assert_(perm('View', chef, 'iss', 'department', '1'))
+        self.assert_(perm('View', chef, 'iss', 'department', '2'))
+        self.assert_(perm('View', chef, 'iss', 'department', '3'))
+        self.assert_(search(chef, 'iss', 'department'))
+
+        self.assert_(not perm('View', mary, 'iss', 'department'))
+        self.assert_(perm('View', mary, 'iss', 'status'))
+        # Conditionally allow view of whole iss (check is False here,
+        # this might check for department owner in the real world)
+        p = self.db.security.addPermission(name='View', klass='iss',
+            check=lambda x,y,z: False)
+        self.db.security.addPermissionToRole('User', p)
+        self.assert_(perm('View', mary, 'iss', 'department'))
+        self.assert_(not perm('View', mary, 'iss', 'department', '1'))
+        self.assert_(not search(mary, 'iss', 'department'))
+
+        self.assert_(perm('View', mary, 'iss', 'status'))
+        self.assert_(not search(mary, 'iss', 'status'))
+        # Allow user to search for iss.status
+        p = self.db.security.addPermission(name='Search', klass='iss',
+            properties=("status",))
+        self.db.security.addPermissionToRole('User', p)
+        self.assert_(search(mary, 'iss', 'status'))
+
+        dep = {'@action':'search','columns':'id','@filter':'department',
+            'department':'1'}
+        stat = {'@action':'search','columns':'id','@filter':'status',
+            'status':'1'}
+        depsort = {'@action':'search','columns':'id','@sort':'department'}
+        depgrp = {'@action':'search','columns':'id','@group':'department'}
+
+        # Filter on department ignored for role 'User':
+        cl = self._make_client(dep, classname='iss', nodeid=None, userid=mary,
+            template='index')
+        h = HTMLRequest(cl)
+        self.assertEqual([x.id for x in h.batch()],['1', '2', '3'])
+        # Filter on department works for role 'Project':
+        cl = self._make_client(dep, classname='iss', nodeid=None, userid=chef,
+            template='index')
+        h = HTMLRequest(cl)
+        self.assertEqual([x.id for x in h.batch()],['2', '3'])
+        # Filter on status works for all:
+        cl = self._make_client(stat, classname='iss', nodeid=None, userid=mary,
+            template='index')
+        h = HTMLRequest(cl)
+        self.assertEqual([x.id for x in h.batch()],['1', '2'])
+        cl = self._make_client(stat, classname='iss', nodeid=None, userid=chef,
+            template='index')
+        h = HTMLRequest(cl)
+        self.assertEqual([x.id for x in h.batch()],['1', '2'])
+        # Sorting and grouping for class Project works:
+        cl = self._make_client(depsort, classname='iss', nodeid=None,
+            userid=chef, template='index')
+        h = HTMLRequest(cl)
+        self.assertEqual([x.id for x in h.batch()],['2', '3', '1'])
+        cl = self._make_client(depgrp, classname='iss', nodeid=None,
+            userid=chef, template='index')
+        h = HTMLRequest(cl)
+        self.assertEqual([x.id for x in h.batch()],['2', '3', '1'])
+        # Sorting and grouping for class User fails:
+        cl = self._make_client(depsort, classname='iss', nodeid=None,
+            userid=mary, template='index')
+        h = HTMLRequest(cl)
+        self.assertEqual([x.id for x in h.batch()],['1', '2', '3'])
+        cl = self._make_client(depgrp, classname='iss', nodeid=None,
+            userid=mary, template='index')
+        h = HTMLRequest(cl)
+        self.assertEqual([x.id for x in h.batch()],['1', '2', '3'])
+
+    def testEditCSV(self):
+        form = dict(rows='id,name\n1,newkey')
+        cl = self._make_client(form, userid='1', classname='keyword')
+        cl.ok_message = []
+        actions.EditCSVAction(cl).handle()
+        self.assertEqual(cl.ok_message, ['Items edited OK'])
+        k = self.db.keyword.getnode('1')
+        self.assertEqual(k.name, 'newkey')
+        form = dict(rows=u'id,name\n1,\xe4\xf6\xfc'.encode('utf-8'))
+        cl = self._make_client(form, userid='1', classname='keyword')
+        cl.ok_message = []
+        actions.EditCSVAction(cl).handle()
+        self.assertEqual(cl.ok_message, ['Items edited OK'])
+        k = self.db.keyword.getnode('1')
+        self.assertEqual(k.name, u'\xe4\xf6\xfc'.encode('utf-8'))
+
+    def testRoles(self):
+        cl = self._make_client({})
+        self.db.user.set('1', roles='aDmin,    uSer')
+        item = HTMLItem(cl, 'user', '1')
+        self.assert_(item.hasRole('Admin'))
+        self.assert_(item.hasRole('User'))
+        self.assert_(item.hasRole('AdmiN'))
+        self.assert_(item.hasRole('UseR'))
+        self.assert_(item.hasRole('UseR','Admin'))
+        self.assert_(item.hasRole('UseR','somethingelse'))
+        self.assert_(item.hasRole('somethingelse','Admin'))
+        self.assert_(not item.hasRole('userr'))
+        self.assert_(not item.hasRole('adminn'))
+        self.assert_(not item.hasRole(''))
+        self.assert_(not item.hasRole(' '))
+        self.db.user.set('1', roles='')
+        self.assert_(not item.hasRole(''))
+
     def testCSVExport(self):
         cl = self._make_client({'@columns': 'id,name'}, nodeid=None,
             userid='1')

Modified: tracker/roundup-src/test/test_dates.py
==============================================================================
--- tracker/roundup-src/test/test_dates.py	(original)
+++ tracker/roundup-src/test/test_dates.py	Thu Aug  4 15:46:52 2011
@@ -23,11 +23,21 @@
 import datetime
 import calendar
 
+from roundup import date, i18n
 from roundup.date import Date, Interval, Range, fixTimeOverflow, \
     get_timezone
 
 
 class DateTestCase(unittest.TestCase):
+    def setUp(self):
+        self.old_gettext_ = i18n.gettext
+        self.old_ngettext_ = i18n.ngettext
+        i18n.gettext = i18n.get_translation(language='C').gettext
+        i18n.ngettext = i18n.get_translation(language='C').ngettext
+
+    def tearDown(self):
+        i18n.gettext = self.old_gettext_
+        i18n.ngettext = self.old_ngettext_
 
     def testDateInterval(self):
         ae = self.assertEqual

Modified: tracker/roundup-src/test/test_indexer.py
==============================================================================
--- tracker/roundup-src/test/test_indexer.py	(original)
+++ tracker/roundup-src/test/test_indexer.py	Thu Aug  4 15:46:52 2011
@@ -124,6 +124,14 @@
                                                     [('test', '1', 'a'),
                                                      ('test', '2', 'a')])
 
+    def test_wordsplitting(self):
+        """Test if word splitting works."""
+        self.dex.add_text(('test', '1', 'a'), 'aaaa-aaa bbbb*bbb')
+        self.dex.add_text(('test', '2', 'a'), 'aaaA-aaa BBBB*BBB')
+        for k in 'aaaa', 'aaa', 'bbbb', 'bbb':
+            self.assertSeqEqual(self.dex.find([k]),
+                [('test', '1', 'a'), ('test', '2', 'a')])
+
     def tearDown(self):
         shutil.rmtree('test-index')
 

Added: tracker/roundup-src/test/test_mailer.py
==============================================================================
--- (empty file)
+++ tracker/roundup-src/test/test_mailer.py	Thu Aug  4 15:46:52 2011
@@ -0,0 +1,24 @@
+#-*- encoding: utf8 -*-
+import unittest
+
+from roundup import mailer
+
+class EncodingTestCase(unittest.TestCase):
+    def testEncoding(self):
+        a = lambda n, a, c, o: self.assertEquals(mailer.nice_sender_header(n,
+            a, c), o)
+        a('ascii', 'ascii@test.com', 'iso8859-1', 'ascii <ascii@test.com>')
+        a(u'café', 'ascii@test.com', 'iso8859-1',
+            '=?iso8859-1?q?caf=E9?= <ascii@test.com>')
+        a('as"ii', 'ascii@test.com', 'iso8859-1', '"as\\"ii" <ascii@test.com>')
+
+def test_suite():
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(EncodingTestCase))
+    return suite
+
+if __name__ == '__main__':
+    runner = unittest.TextTestRunner()
+    unittest.main(testRunner=runner)
+
+# vim: set et sts=4 sw=4 :

Modified: tracker/roundup-src/test/test_mailgw.py
==============================================================================
--- tracker/roundup-src/test/test_mailgw.py	(original)
+++ tracker/roundup-src/test/test_mailgw.py	Thu Aug  4 15:46:52 2011
@@ -21,12 +21,14 @@
     os.environ['SENDMAILDEBUG'] = 'mail-test.log'
 SENDMAILDEBUG = os.environ['SENDMAILDEBUG']
 
+from roundup import mailgw, i18n, roundupdb
 from roundup.mailgw import MailGW, Unauthorized, uidFromAddress, \
     parseContent, IgnoreLoop, IgnoreBulk, MailUsageError, MailUsageHelp
 from roundup import init, instance, password, rfc2822, __version__
 from roundup.anypy.sets_ import set
 
-import db_test_base
+#import db_test_base
+import memorydb
 
 class Message(rfc822.Message):
     """String-based Message class with equivalence test."""
@@ -37,6 +39,10 @@
         return (self.dict == other.dict and
                 self.fp.read() == other.fp.read())
 
+class Tracker(object):
+    def open(self, journaltag):
+        return self.db
+
 class DiffHelper:
     def compareMessages(self, new, old):
         """Compare messages for semantic equivalence."""
@@ -78,7 +84,7 @@
                 res.extend(body_diff)
 
             if res:
-                res.insert(0, 'Generated message not correct (diff follows):')
+                res.insert(0, 'Generated message not correct (diff follows, expected vs. actual):')
                 raise AssertionError, '\n'.join(res)
 
     def compareStrings(self, s2, s1, replace={}):
@@ -114,13 +120,17 @@
     count = 0
     schema = 'classic'
     def setUp(self):
+        self.old_translate_ = mailgw._
+        roundupdb._ = mailgw._ = i18n.get_translation(language='C').gettext
         MailgwTestCase.count = MailgwTestCase.count + 1
-        self.dirname = '_test_mailgw_%s'%self.count
-        # set up and open a tracker
-        self.instance = db_test_base.setupTracker(self.dirname)
 
-        # and open the database
-        self.db = self.instance.open('admin')
+        # and open the database / "instance"
+        self.db = memorydb.create('admin')
+        self.instance = Tracker()
+        self.instance.db = self.db
+        self.instance.config = self.db.config
+        self.instance.MailGW = MailGW
+
         self.chef_id = self.db.user.create(username='Chef',
+            address='chef@bork.bork.bork', realname='Bork, Chef', roles='User')
         self.richard_id = self.db.user.create(username='richard',
@@ -130,27 +140,27 @@
         self.john_id = self.db.user.create(username='john',
             address='john at test.test', roles='User', realname='John Doe',
             alternate_addresses='jondoe at test.test\njohn.doe at test.test')
+        self.rgg_id = self.db.user.create(username='rgg',
+            address='rgg at test.test', roles='User')
 
     def tearDown(self):
+        roundupdb._ = mailgw._ = self.old_translate_
         if os.path.exists(SENDMAILDEBUG):
             os.remove(SENDMAILDEBUG)
         self.db.close()
-        try:
-            shutil.rmtree(self.dirname)
-        except OSError, error:
-            if error.errno not in (errno.ENOENT, errno.ESRCH): raise
-
-    def _handle_mail(self, message):
-        # handler will open a new db handle. On single-threaded
-        # databases we'll have to close our current connection
-        self.db.commit()
-        self.db.close()
-        handler = self.instance.MailGW(self.instance)
+
+    def _create_mailgw(self, message, args=()):
+        class MailGW(self.instance.MailGW):
+            def handle_message(self, message):
+                return self._handle_message(message)
+        handler = MailGW(self.instance, args)
+        handler.db = self.db
+        return handler
+
+    def _handle_mail(self, message, args=()):
+        handler = self._create_mailgw(message, args)
         handler.trapExceptions = 0
-        ret = handler.main(StringIO(message))
-        # handler had its own database, open new connection
-        self.db = self.instance.open('admin')
-        return ret
+        return handler.main(StringIO(message))
 
     def _get_mail(self):
         f = open(SENDMAILDEBUG)
@@ -173,6 +183,99 @@
         assert not os.path.exists(SENDMAILDEBUG)
         self.assertEqual(self.db.issue.get(nodeid, 'title'), 'Testing...')
 
+    def testMessageWithFromInIt(self):
+        nodeid = self._handle_mail('''Content-Type: text/plain;
+  charset="iso-8859-1"
+From: Chef <chef at bork.bork.bork>
+To: issue_tracker at your.tracker.email.domain.example
+Cc: richard at test.test
+Reply-To: chef at bork.bork.bork
+Message-Id: <dummy_test_message_id>
+Subject: [issue] Testing...
+
+From here to there!
+''')
+        assert not os.path.exists(SENDMAILDEBUG)
+        msgid = self.db.issue.get(nodeid, 'messages')[0]
+        self.assertEqual(self.db.msg.get(msgid, 'content'), 'From here to there!')
+
+    def testNoMessageId(self):
+        self.instance.config['MAIL_DOMAIN'] = 'example.com'
+        nodeid = self._handle_mail('''Content-Type: text/plain;
+  charset="iso-8859-1"
+From: Chef <chef at bork.bork.bork>
+To: issue_tracker at your.tracker.email.domain.example
+Cc: richard at test.test
+Reply-To: chef at bork.bork.bork
+Subject: [issue] Testing...
+
+Hi there!
+''')
+        assert not os.path.exists(SENDMAILDEBUG)
+        msgid = self.db.issue.get(nodeid, 'messages')[0]
+        messageid = self.db.msg.get(msgid, 'messageid')
+        x1, x2 = messageid.split('@')
+        self.assertEqual(x2, 'example.com>')
+        x = x1.split('.')[-1]
+        self.assertEqual(x, 'issueNone')
+        nodeid = self._handle_mail('''Content-Type: text/plain;
+  charset="iso-8859-1"
+From: Chef <chef at bork.bork.bork>
+To: issue_tracker at your.tracker.email.domain.example
+Subject: [issue%(nodeid)s] Testing...
+
+Just a test reply
+'''%locals())
+        msgid = self.db.issue.get(nodeid, 'messages')[-1]
+        messageid = self.db.msg.get(msgid, 'messageid')
+        x1, x2 = messageid.split('@')
+        self.assertEqual(x2, 'example.com>')
+        x = x1.split('.')[-1]
+        self.assertEqual(x, "issue%s"%nodeid)
+
+    def testOptions(self):
+        nodeid = self._handle_mail('''Content-Type: text/plain;
+  charset="iso-8859-1"
+From: Chef <chef at bork.bork.bork>
+To: issue_tracker at your.tracker.email.domain.example
+Message-Id: <dummy_test_message_id>
+Reply-To: chef at bork.bork.bork
+Subject: [issue] Testing...
+
+Hi there!
+''', (('-C', 'issue'), ('-S', 'status=chatting;priority=critical')))
+        self.assertEqual(self.db.issue.get(nodeid, 'status'), '3')
+        self.assertEqual(self.db.issue.get(nodeid, 'priority'), '1')
+
+    def testOptionsMulti(self):
+        nodeid = self._handle_mail('''Content-Type: text/plain;
+  charset="iso-8859-1"
+From: Chef <chef at bork.bork.bork>
+To: issue_tracker at your.tracker.email.domain.example
+Message-Id: <dummy_test_message_id>
+Reply-To: chef at bork.bork.bork
+Subject: [issue] Testing...
+
+Hi there!
+''', (('-C', 'issue'), ('-S', 'status=chatting'), ('-S', 'priority=critical')))
+        self.assertEqual(self.db.issue.get(nodeid, 'status'), '3')
+        self.assertEqual(self.db.issue.get(nodeid, 'priority'), '1')
+
+    def testOptionClass(self):
+        nodeid = self._handle_mail('''Content-Type: text/plain;
+  charset="iso-8859-1"
+From: Chef <chef at bork.bork.bork>
+To: issue_tracker at your.tracker.email.domain.example
+Message-Id: <dummy_test_message_id>
+Reply-To: chef at bork.bork.bork
+Subject: [issue] Testing... [status=chatting;priority=critical]
+
+Hi there!
+''', (('-c', 'issue'),))
+        self.assertEqual(self.db.issue.get(nodeid, 'title'), 'Testing...')
+        self.assertEqual(self.db.issue.get(nodeid, 'status'), '3')
+        self.assertEqual(self.db.issue.get(nodeid, 'priority'), '1')
+
     def doNewIssue(self):
         nodeid = self._handle_mail('''Content-Type: text/plain;
   charset="iso-8859-1"
@@ -257,7 +360,8 @@
 Subject: [issue1] Testing...
 To: chef at bork.bork.bork, mary at test.test, richard at test.test
 From: "Bork, Chef" <issue_tracker at your.tracker.email.domain.example>
-Reply-To: Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
 MIME-Version: 1.0
 Message-Id: <dummy_test_message_id>
 X-Roundup-Name: Roundup issue tracker
@@ -301,7 +405,8 @@
 Subject: [issue1] Testing...
 To: mary at test.test, richard at test.test
 From: "Bork, Chef" <issue_tracker at your.tracker.email.domain.example>
-Reply-To: Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
 MIME-Version: 1.0
 Message-Id: <dummy_test_message_id>
 X-Roundup-Name: Roundup issue tracker
@@ -342,7 +447,8 @@
 Subject: [issue1] Testing...
 To: mary at test.test, richard at test.test
 From: "Bork, Chef" <issue_tracker at your.tracker.email.domain.example>
-Reply-To: Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
 MIME-Version: 1.0
 Message-Id: <dummy_test_message_id>
 X-Roundup-Name: Roundup issue tracker
@@ -431,6 +537,77 @@
 --bxyzzy--
 '''
 
+    multipart_msg_latin1 = '''From: mary <mary at test.test>
+To: issue_tracker at your.tracker.email.domain.example
+Message-Id: <followup_dummy_id>
+In-Reply-To: <dummy_test_message_id>
+Subject: [issue1] Testing...
+Content-Type: multipart/alternative; boundary=001485f339f8f361fb049188dbba
+
+
+--001485f339f8f361fb049188dbba
+Content-Type: text/plain; charset=ISO-8859-1
+Content-Transfer-Encoding: quoted-printable
+
+umlaut =E4=F6=FC=C4=D6=DC=DF
+
+--001485f339f8f361fb049188dbba
+Content-Type: text/html; charset=ISO-8859-1
+Content-Transfer-Encoding: quoted-printable
+
+<html>umlaut =E4=F6=FC=C4=D6=DC=DF</html>
+
+--001485f339f8f361fb049188dbba--
+'''
+
+    multipart_msg_rfc822 = '''From: mary <mary at test.test>
+To: issue_tracker at your.tracker.email.domain.example
+Message-Id: <followup_dummy_id>
+In-Reply-To: <dummy_test_message_id>
+Subject: [issue1] Testing...
+Content-Type: multipart/mixed; boundary=001485f339f8f361fb049188dbba
+
+This is a multi-part message in MIME format.
+--001485f339f8f361fb049188dbba
+Content-Type: text/plain; charset=ISO-8859-15
+Content-Transfer-Encoding: 7bit
+
+First part: Text
+
+--001485f339f8f361fb049188dbba
+Content-Type: message/rfc822; name="Fwd: Original email subject.eml"
+Content-Transfer-Encoding: 7bit
+Content-Disposition: attachment; filename="Fwd: Original email subject.eml"
+
+Message-Id: <followup_dummy_id_2>
+In-Reply-To: <dummy_test_message_id_2>
+MIME-Version: 1.0
+Subject: Fwd: Original email subject
+Date: Mon, 23 Aug 2010 08:23:33 +0200
+Content-Type: multipart/alternative; boundary="090500050101020406060002"
+
+This is a multi-part message in MIME format.
+--090500050101020406060002
+Content-Type: text/plain; charset=ISO-8859-15; format=flowed
+Content-Transfer-Encoding: 7bit
+
+some text in inner email
+========================
+
+--090500050101020406060002
+Content-Type: text/html; charset=ISO-8859-15
+Content-Transfer-Encoding: 7bit
+
+<html>
+some text in inner email
+========================
+</html>
+
+--090500050101020406060002--
+
+--001485f339f8f361fb049188dbba--
+'''
+
     def testMultipartKeepAlternatives(self):
         self.doNewIssue()
         self._handle_mail(self.multipart_msg)
@@ -448,23 +625,412 @@
                 self.assertEqual(f.content, content [n])
         self.assertEqual(msg.content, 'test attachment second text/plain')
 
-    def testMultipartDropAlternatives(self):
+    def testMultipartSeveralAttachmentMessages(self):
+        self.doNewIssue()
+        self._handle_mail(self.multipart_msg)
+        messages = self.db.issue.get('1', 'messages')
+        messages.sort()
+        self.assertEqual(messages[-1], '2')
+        msg = self.db.msg.getnode (messages[-1])
+        self.assertEqual(len(msg.files), 5)
+        issue = self.db.issue.getnode ('1')
+        self.assertEqual(len(issue.files), 5)
+        names = {0 : 'first.dvi', 4 : 'second.dvi'}
+        content = {3 : 'test attachment third text/plain\n',
+                   4 : 'Just a test\n'}
+        for n, id in enumerate (msg.files):
+            f = self.db.file.getnode (id)
+            self.assertEqual(f.name, names.get (n, 'unnamed'))
+            if n in content :
+                self.assertEqual(f.content, content [n])
+        self.assertEqual(msg.content, 'test attachment second text/plain')
+        self.assertEqual(msg.files, ['1', '2', '3', '4', '5'])
+        self.assertEqual(issue.files, ['1', '2', '3', '4', '5'])
+
+        self._handle_mail(self.multipart_msg)
+        issue = self.db.issue.getnode ('1')
+        self.assertEqual(len(issue.files), 10)
+        messages = self.db.issue.get('1', 'messages')
+        messages.sort()
+        self.assertEqual(messages[-1], '3')
+        msg = self.db.msg.getnode (messages[-1])
+        self.assertEqual(issue.files, [str(i+1) for i in range(10)])
+        self.assertEqual(msg.files, ['6', '7', '8', '9', '10'])
+
+    def testMultipartKeepFiles(self):
+        self.doNewIssue()
+        self._handle_mail(self.multipart_msg)
+        messages = self.db.issue.get('1', 'messages')
+        messages.sort()
+        msg = self.db.msg.getnode (messages[-1])
+        self.assertEqual(len(msg.files), 5)
+        issue = self.db.issue.getnode ('1')
+        self.assertEqual(len(issue.files), 5)
+        names = {0 : 'first.dvi', 4 : 'second.dvi'}
+        content = {3 : 'test attachment third text/plain\n',
+                   4 : 'Just a test\n'}
+        for n, id in enumerate (msg.files):
+            f = self.db.file.getnode (id)
+            self.assertEqual(f.name, names.get (n, 'unnamed'))
+            if n in content :
+                self.assertEqual(f.content, content [n])
+        self.assertEqual(msg.content, 'test attachment second text/plain')
+        self._handle_mail('''From: mary <mary at test.test>
+To: issue_tracker at your.tracker.email.domain.example
+Message-Id: <followup_dummy_id2>
+In-Reply-To: <dummy_test_message_id>
+Subject: [issue1] Testing...
+
+This ist a message without attachment
+''')
+        issue = self.db.issue.getnode ('1')
+        self.assertEqual(len(issue.files), 5)
+        self.assertEqual(issue.files, ['1', '2', '3', '4', '5'])
+
+    def testMultipartDropAlternatives(self):
+        self.doNewIssue()
+        self.db.config.MAILGW_IGNORE_ALTERNATIVES = True
+        self._handle_mail(self.multipart_msg)
+        messages = self.db.issue.get('1', 'messages')
+        messages.sort()
+        msg = self.db.msg.getnode (messages[-1])
+        self.assertEqual(len(msg.files), 2)
+        names = {1 : 'second.dvi'}
+        content = {0 : 'test attachment third text/plain\n',
+                   1 : 'Just a test\n'}
+        for n, id in enumerate (msg.files):
+            f = self.db.file.getnode (id)
+            self.assertEqual(f.name, names.get (n, 'unnamed'))
+            if n in content :
+                self.assertEqual(f.content, content [n])
+        self.assertEqual(msg.content, 'test attachment second text/plain')
+
+    def testMultipartCharsetUTF8NoAttach(self):
+        c = 'umlaut \xc3\xa4\xc3\xb6\xc3\xbc\xc3\x84\xc3\x96\xc3\x9c\xc3\x9f'
+        self.doNewIssue()
+        self.db.config.NOSY_MAX_ATTACHMENT_SIZE = 0
+        self._handle_mail(self.multipart_msg_latin1)
+        messages = self.db.issue.get('1', 'messages')
+        messages.sort()
+        msg = self.db.msg.getnode (messages[-1])
+        self.assertEqual(len(msg.files), 1)
+        name = 'unnamed'
+        content = '<html>' + c + '</html>\n'
+        for n, id in enumerate (msg.files):
+            f = self.db.file.getnode (id)
+            self.assertEqual(f.name, name)
+            self.assertEqual(f.content, content)
+        self.assertEqual(msg.content, c)
+        self.compareMessages(self._get_mail(),
+'''FROM: roundup-admin at your.tracker.email.domain.example
+TO: chef at bork.bork.bork, richard at test.test
+Content-Type: text/plain; charset="utf-8"
+Subject: [issue1] Testing...
+To: chef at bork.bork.bork, richard at test.test
+From: "Contrary, Mary" <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
+MIME-Version: 1.0
+Message-Id: <followup_dummy_id>
+In-Reply-To: <dummy_test_message_id>
+X-Roundup-Name: Roundup issue tracker
+X-Roundup-Loop: hello
+X-Roundup-Issue-Status: chatting
+X-Roundup-Issue-Files: unnamed
+Content-Transfer-Encoding: quoted-printable
+
+
+Contrary, Mary <mary at test.test> added the comment:
+
+umlaut =C3=A4=C3=B6=C3=BC=C3=84=C3=96=C3=9C=C3=9F
+File 'unnamed' not attached - you can download it from http://tracker.examp=
+le/cgi-bin/roundup.cgi/bugs/file1.
+
+----------
+status: unread -> chatting
+
+_______________________________________________________________________
+Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+<http://tracker.example/cgi-bin/roundup.cgi/bugs/issue1>
+_______________________________________________________________________
+''')
+
+    def testMultipartCharsetLatin1NoAttach(self):
+        c = 'umlaut \xc3\xa4\xc3\xb6\xc3\xbc\xc3\x84\xc3\x96\xc3\x9c\xc3\x9f'
+        self.doNewIssue()
+        self.db.config.NOSY_MAX_ATTACHMENT_SIZE = 0
+        self.db.config.MAIL_CHARSET = 'iso-8859-1'
+        self._handle_mail(self.multipart_msg_latin1)
+        messages = self.db.issue.get('1', 'messages')
+        messages.sort()
+        msg = self.db.msg.getnode (messages[-1])
+        self.assertEqual(len(msg.files), 1)
+        name = 'unnamed'
+        content = '<html>' + c + '</html>\n'
+        for n, id in enumerate (msg.files):
+            f = self.db.file.getnode (id)
+            self.assertEqual(f.name, name)
+            self.assertEqual(f.content, content)
+        self.assertEqual(msg.content, c)
+        self.compareMessages(self._get_mail(),
+'''FROM: roundup-admin at your.tracker.email.domain.example
+TO: chef at bork.bork.bork, richard at test.test
+Content-Type: text/plain; charset="iso-8859-1"
+Subject: [issue1] Testing...
+To: chef at bork.bork.bork, richard at test.test
+From: "Contrary, Mary" <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
+MIME-Version: 1.0
+Message-Id: <followup_dummy_id>
+In-Reply-To: <dummy_test_message_id>
+X-Roundup-Name: Roundup issue tracker
+X-Roundup-Loop: hello
+X-Roundup-Issue-Status: chatting
+X-Roundup-Issue-Files: unnamed
+Content-Transfer-Encoding: quoted-printable
+
+
+Contrary, Mary <mary at test.test> added the comment:
+
+umlaut =E4=F6=FC=C4=D6=DC=DF
+File 'unnamed' not attached - you can download it from http://tracker.examp=
+le/cgi-bin/roundup.cgi/bugs/file1.
+
+----------
+status: unread -> chatting
+
+_______________________________________________________________________
+Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+<http://tracker.example/cgi-bin/roundup.cgi/bugs/issue1>
+_______________________________________________________________________
+''')
+
+    def testMultipartCharsetUTF8AttachFile(self):
+        c = 'umlaut \xc3\xa4\xc3\xb6\xc3\xbc\xc3\x84\xc3\x96\xc3\x9c\xc3\x9f'
+        self.doNewIssue()
+        self._handle_mail(self.multipart_msg_latin1)
+        messages = self.db.issue.get('1', 'messages')
+        messages.sort()
+        msg = self.db.msg.getnode (messages[-1])
+        self.assertEqual(len(msg.files), 1)
+        name = 'unnamed'
+        content = '<html>' + c + '</html>\n'
+        for n, id in enumerate (msg.files):
+            f = self.db.file.getnode (id)
+            self.assertEqual(f.name, name)
+            self.assertEqual(f.content, content)
+        self.assertEqual(msg.content, c)
+        self.compareMessages(self._get_mail(),
+'''FROM: roundup-admin at your.tracker.email.domain.example
+TO: chef at bork.bork.bork, richard at test.test
+Content-Type: multipart/mixed; boundary="utf-8"
+Subject: [issue1] Testing...
+To: chef at bork.bork.bork, richard at test.test
+From: "Contrary, Mary" <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
+MIME-Version: 1.0
+Message-Id: <followup_dummy_id>
+In-Reply-To: <dummy_test_message_id>
+X-Roundup-Name: Roundup issue tracker
+X-Roundup-Loop: hello
+X-Roundup-Issue-Status: chatting
+X-Roundup-Issue-Files: unnamed
+Content-Transfer-Encoding: quoted-printable
+
+
+--utf-8
+MIME-Version: 1.0
+Content-Type: text/plain; charset="utf-8"
+Content-Transfer-Encoding: quoted-printable
+
+
+Contrary, Mary <mary at test.test> added the comment:
+
+umlaut =C3=A4=C3=B6=C3=BC=C3=84=C3=96=C3=9C=C3=9F
+
+----------
+status: unread -> chatting
+
+_______________________________________________________________________
+Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+<http://tracker.example/cgi-bin/roundup.cgi/bugs/issue1>
+_______________________________________________________________________
+--utf-8
+Content-Type: text/html
+MIME-Version: 1.0
+Content-Transfer-Encoding: base64
+Content-Disposition: attachment;
+ filename="unnamed"
+
+PGh0bWw+dW1sYXV0IMOkw7bDvMOEw5bDnMOfPC9odG1sPgo=
+
+--utf-8--
+''')
+
+    def testMultipartCharsetLatin1AttachFile(self):
+        c = 'umlaut \xc3\xa4\xc3\xb6\xc3\xbc\xc3\x84\xc3\x96\xc3\x9c\xc3\x9f'
+        self.doNewIssue()
+        self.db.config.MAIL_CHARSET = 'iso-8859-1'
+        self._handle_mail(self.multipart_msg_latin1)
+        messages = self.db.issue.get('1', 'messages')
+        messages.sort()
+        msg = self.db.msg.getnode (messages[-1])
+        self.assertEqual(len(msg.files), 1)
+        name = 'unnamed'
+        content = '<html>' + c + '</html>\n'
+        for n, id in enumerate (msg.files):
+            f = self.db.file.getnode (id)
+            self.assertEqual(f.name, name)
+            self.assertEqual(f.content, content)
+        self.assertEqual(msg.content, c)
+        self.compareMessages(self._get_mail(),
+'''FROM: roundup-admin at your.tracker.email.domain.example
+TO: chef at bork.bork.bork, richard at test.test
+Content-Type: multipart/mixed; boundary="utf-8"
+Subject: [issue1] Testing...
+To: chef at bork.bork.bork, richard at test.test
+From: "Contrary, Mary" <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
+MIME-Version: 1.0
+Message-Id: <followup_dummy_id>
+In-Reply-To: <dummy_test_message_id>
+X-Roundup-Name: Roundup issue tracker
+X-Roundup-Loop: hello
+X-Roundup-Issue-Status: chatting
+X-Roundup-Issue-Files: unnamed
+Content-Transfer-Encoding: quoted-printable
+
+
+--utf-8
+MIME-Version: 1.0
+Content-Type: text/plain; charset="iso-8859-1"
+Content-Transfer-Encoding: quoted-printable
+
+
+Contrary, Mary <mary at test.test> added the comment:
+
+umlaut =E4=F6=FC=C4=D6=DC=DF
+
+----------
+status: unread -> chatting
+
+_______________________________________________________________________
+Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+<http://tracker.example/cgi-bin/roundup.cgi/bugs/issue1>
+_______________________________________________________________________
+--utf-8
+Content-Type: text/html
+MIME-Version: 1.0
+Content-Transfer-Encoding: base64
+Content-Disposition: attachment;
+ filename="unnamed"
+
+PGh0bWw+dW1sYXV0IMOkw7bDvMOEw5bDnMOfPC9odG1sPgo=
+
+--utf-8--
+''')
+
+    def testMultipartRFC822(self):
+        self.doNewIssue()
+        self._handle_mail(self.multipart_msg_rfc822)
+        messages = self.db.issue.get('1', 'messages')
+        messages.sort()
+        msg = self.db.msg.getnode (messages[-1])
+        self.assertEqual(len(msg.files), 1)
+        name = "Fwd: Original email subject.eml"
+        for n, id in enumerate (msg.files):
+            f = self.db.file.getnode (id)
+            self.assertEqual(f.name, name)
+        self.assertEqual(msg.content, 'First part: Text')
+        self.compareMessages(self._get_mail(),
+'''TO: chef at bork.bork.bork, richard at test.test
+Content-Type: text/plain; charset="utf-8"
+Subject: [issue1] Testing...
+To: chef at bork.bork.bork, richard at test.test
+From: "Contrary, Mary" <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
+MIME-Version: 1.0
+Message-Id: <followup_dummy_id>
+In-Reply-To: <dummy_test_message_id>
+X-Roundup-Name: Roundup issue tracker
+X-Roundup-Loop: hello
+X-Roundup-Issue-Status: chatting
+X-Roundup-Issue-Files: Fwd: Original email subject.eml
+Content-Transfer-Encoding: quoted-printable
+
+
+--utf-8
+MIME-Version: 1.0
+Content-Type: text/plain; charset="utf-8"
+Content-Transfer-Encoding: quoted-printable
+
+
+Contrary, Mary <mary at test.test> added the comment:
+
+First part: Text
+
+----------
+status: unread -> chatting
+
+_______________________________________________________________________
+Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+<http://tracker.example/cgi-bin/roundup.cgi/bugs/issue1>
+_______________________________________________________________________
+--utf-8
+Content-Type: message/rfc822
+MIME-Version: 1.0
+Content-Disposition: attachment;
+ filename="Fwd: Original email subject.eml"
+
+Message-Id: <followup_dummy_id_2>
+In-Reply-To: <dummy_test_message_id_2>
+MIME-Version: 1.0
+Subject: Fwd: Original email subject
+Date: Mon, 23 Aug 2010 08:23:33 +0200
+Content-Type: multipart/alternative; boundary="090500050101020406060002"
+
+This is a multi-part message in MIME format.
+--090500050101020406060002
+Content-Type: text/plain; charset=ISO-8859-15; format=flowed
+Content-Transfer-Encoding: 7bit
+
+some text in inner email
+========================
+
+--090500050101020406060002
+Content-Type: text/html; charset=ISO-8859-15
+Content-Transfer-Encoding: 7bit
+
+<html>
+some text in inner email
+========================
+</html>
+
+--090500050101020406060002--
+
+--utf-8--
+''')
+
+    def testMultipartRFC822Unpack(self):
         self.doNewIssue()
-        self.db.config.MAILGW_IGNORE_ALTERNATIVES = True
-        self._handle_mail(self.multipart_msg)
+        self.db.config.MAILGW_UNPACK_RFC822 = True
+        self._handle_mail(self.multipart_msg_rfc822)
         messages = self.db.issue.get('1', 'messages')
         messages.sort()
         msg = self.db.msg.getnode (messages[-1])
-        assert(len(msg.files) == 2)
-        names = {1 : 'second.dvi'}
-        content = {0 : 'test attachment third text/plain\n',
-                   1 : 'Just a test\n'}
+        self.assertEqual(len(msg.files), 2)
+        t = 'some text in inner email\n========================\n'
+        content = {0 : t, 1 : '<html>\n' + t + '</html>\n'}
         for n, id in enumerate (msg.files):
             f = self.db.file.getnode (id)
-            self.assertEqual(f.name, names.get (n, 'unnamed'))
+            self.assertEqual(f.name, 'unnamed')
             if n in content :
                 self.assertEqual(f.content, content [n])
-        self.assertEqual(msg.content, 'test attachment second text/plain')
+        self.assertEqual(msg.content, 'First part: Text')
 
     def testSimpleFollowup(self):
         self.doNewIssue()
@@ -485,7 +1051,8 @@
 Subject: [issue1] Testing...
 To: chef at bork.bork.bork, richard at test.test
 From: "Contrary, Mary" <issue_tracker at your.tracker.email.domain.example>
-Reply-To: Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
 MIME-Version: 1.0
 Message-Id: <followup_dummy_id>
 In-Reply-To: <dummy_test_message_id>
@@ -533,7 +1100,112 @@
 Subject: [issue1] Testing...
 To: chef at bork.bork.bork, john at test.test, mary at test.test
 From: richard <issue_tracker at your.tracker.email.domain.example>
-Reply-To: Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
+MIME-Version: 1.0
+Message-Id: <followup_dummy_id>
+In-Reply-To: <dummy_test_message_id>
+X-Roundup-Name: Roundup issue tracker
+X-Roundup-Loop: hello
+X-Roundup-Issue-Status: chatting
+Content-Transfer-Encoding: quoted-printable
+
+
+richard <richard at test.test> added the comment:
+
+This is a followup
+
+----------
+assignedto:  -> mary
+nosy: +john, mary
+status: unread -> chatting
+
+_______________________________________________________________________
+Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+<http://tracker.example/cgi-bin/roundup.cgi/bugs/issue1>
+_______________________________________________________________________
+''')
+
+    def testFollowupNoSubjectChange(self):
+        self.db.config.MAILGW_SUBJECT_UPDATES_TITLE = 'no'
+        self.doNewIssue()
+
+        self._handle_mail('''Content-Type: text/plain;
+  charset="iso-8859-1"
+From: richard <richard at test.test>
+To: issue_tracker at your.tracker.email.domain.example
+Message-Id: <followup_dummy_id>
+In-Reply-To: <dummy_test_message_id>
+Subject: [issue1] Wrzlbrmft... [assignedto=mary; nosy=+john]
+
+This is a followup
+''')
+        l = self.db.issue.get('1', 'nosy')
+        l.sort()
+        self.assertEqual(l, [self.chef_id, self.richard_id, self.mary_id,
+            self.john_id])
+
+        self.compareMessages(self._get_mail(),
+'''FROM: roundup-admin at your.tracker.email.domain.example
+TO: chef at bork.bork.bork, john at test.test, mary at test.test
+Content-Type: text/plain; charset="utf-8"
+Subject: [issue1] Testing...
+To: chef at bork.bork.bork, john at test.test, mary at test.test
+From: richard <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
+MIME-Version: 1.0
+Message-Id: <followup_dummy_id>
+In-Reply-To: <dummy_test_message_id>
+X-Roundup-Name: Roundup issue tracker
+X-Roundup-Loop: hello
+X-Roundup-Issue-Status: chatting
+Content-Transfer-Encoding: quoted-printable
+
+
+richard <richard at test.test> added the comment:
+
+This is a followup
+
+----------
+assignedto:  -> mary
+nosy: +john, mary
+status: unread -> chatting
+
+_______________________________________________________________________
+Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+<http://tracker.example/cgi-bin/roundup.cgi/bugs/issue1>
+_______________________________________________________________________
+''')
+        self.assertEqual(self.db.issue.get('1','title'), 'Testing...')
+
+    def testFollowupExplicitSubjectChange(self):
+        self.doNewIssue()
+
+        self._handle_mail('''Content-Type: text/plain;
+  charset="iso-8859-1"
+From: richard <richard at test.test>
+To: issue_tracker at your.tracker.email.domain.example
+Message-Id: <followup_dummy_id>
+In-Reply-To: <dummy_test_message_id>
+Subject: [issue1] Wrzlbrmft... [assignedto=mary; nosy=+john; title=new title]
+
+This is a followup
+''')
+        l = self.db.issue.get('1', 'nosy')
+        l.sort()
+        self.assertEqual(l, [self.chef_id, self.richard_id, self.mary_id,
+            self.john_id])
+
+        self.compareMessages(self._get_mail(),
+'''FROM: roundup-admin at your.tracker.email.domain.example
+TO: chef at bork.bork.bork, john at test.test, mary at test.test
+Content-Type: text/plain; charset="utf-8"
+Subject: [issue1] new title
+To: chef at bork.bork.bork, john at test.test, mary at test.test
+From: richard <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
 MIME-Version: 1.0
 Message-Id: <followup_dummy_id>
 In-Reply-To: <dummy_test_message_id>
@@ -551,6 +1223,7 @@
 assignedto:  -> mary
 nosy: +john, mary
 status: unread -> chatting
+title: Testing... -> new title
 
 _______________________________________________________________________
 Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
@@ -558,6 +1231,49 @@
 _______________________________________________________________________
 ''')
 
+    def testNosyGeneration(self):
+        self.db.issue.create(title='test')
+
+        # create a nosy message
+        msg = self.db.msg.create(content='This is a test',
+            author=self.richard_id, messageid='<dummy_test_message_id>')
+        self.db.journaltag = 'richard'
+        l = self.db.issue.create(title='test', messages=[msg],
+            nosy=[self.chef_id, self.mary_id, self.john_id])
+
+        self.compareMessages(self._get_mail(),
+'''FROM: roundup-admin at your.tracker.email.domain.example
+TO: chef at bork.bork.bork, john at test.test, mary at test.test
+Content-Type: text/plain; charset="utf-8"
+Subject: [issue2] test
+To: chef at bork.bork.bork, john at test.test, mary at test.test
+From: richard <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
+MIME-Version: 1.0
+Message-Id: <dummy_test_message_id>
+X-Roundup-Name: Roundup issue tracker
+X-Roundup-Loop: hello
+X-Roundup-Issue-Status: unread
+Content-Transfer-Encoding: quoted-printable
+
+
+New submission from richard <richard at test.test>:
+
+This is a test
+
+----------
+messages: 1
+nosy: Chef, john, mary, richard
+status: unread
+title: test
+
+_______________________________________________________________________
+Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+<http://tracker.example/cgi-bin/roundup.cgi/bugs/issue2>
+_______________________________________________________________________
+''')
+
     def testPropertyChangeOnly(self):
         self.doNewIssue()
         oldvalues = self.db.getnode('issue', '1').copy()
@@ -565,7 +1281,7 @@
         # reconstruct old behaviour: This would reuse the
         # database-handle from the doNewIssue above which has committed
         # as user "Chef". So we close and reopen the db as that user.
-        self.db.close()
+        #self.db.close() actually don't close 'cos this empties memorydb
         self.db = self.instance.open('Chef')
         self.db.issue.set('1', assignedto=self.chef_id)
         self.db.commit()
@@ -590,8 +1306,10 @@
 X-Roundup-Loop: hello
 X-Roundup-Issue-Status: unread
 X-Roundup-Version: 1.3.3
+In-Reply-To: <dummy_test_message_id>
 MIME-Version: 1.0
-Reply-To: Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
 Content-Transfer-Encoding: quoted-printable
 
 
@@ -629,7 +1347,8 @@
 Subject: [issue1] Testing...
 To: chef at bork.bork.bork, john at test.test, mary at test.test
 From: richard <issue_tracker at your.tracker.email.domain.example>
-Reply-To: Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
 MIME-Version: 1.0
 Message-Id: <followup_dummy_id>
 In-Reply-To: <dummy_test_message_id>
@@ -742,7 +1461,8 @@
 Subject: [issue1] Testing...
 To: chef at bork.bork.bork, richard at test.test
 From: John Doe <issue_tracker at your.tracker.email.domain.example>
-Reply-To: Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
 MIME-Version: 1.0
 Message-Id: <followup_dummy_id>
 In-Reply-To: <dummy_test_message_id>
@@ -788,7 +1508,8 @@
 Subject: [issue1] Testing...
 To: chef at bork.bork.bork
 From: richard <issue_tracker at your.tracker.email.domain.example>
-Reply-To: Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
 MIME-Version: 1.0
 Message-Id: <followup_dummy_id>
 In-Reply-To: <dummy_test_message_id>
@@ -834,7 +1555,8 @@
 Subject: [issue1] Testing...
 To: chef at bork.bork.bork, john at test.test, richard at test.test
 From: John Doe <issue_tracker at your.tracker.email.domain.example>
-Reply-To: Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
 MIME-Version: 1.0
 Message-Id: <followup_dummy_id>
 In-Reply-To: <dummy_test_message_id>
@@ -879,7 +1601,8 @@
 Subject: [issue1] Testing...
 To: chef at bork.bork.bork, richard at test.test
 From: John Doe <issue_tracker at your.tracker.email.domain.example>
-Reply-To: Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
 MIME-Version: 1.0
 Message-Id: <followup_dummy_id>
 In-Reply-To: <dummy_test_message_id>
@@ -924,7 +1647,8 @@
 Subject: [issue1] Testing...
 To: chef at bork.bork.bork
 From: richard <issue_tracker at your.tracker.email.domain.example>
-Reply-To: Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
 MIME-Version: 1.0
 Message-Id: <followup_dummy_id>
 In-Reply-To: <dummy_test_message_id>
@@ -1008,7 +1732,7 @@
         assert not os.path.exists(SENDMAILDEBUG)
 
     def testNewUserAuthor(self):
-
+        self.db.commit()
         l = self.db.user.list()
         l.sort()
         message = '''Content-Type: text/plain;
@@ -1020,12 +1744,9 @@
 
 This is a test submission of a new issue.
 '''
-        def hook (db, **kw):
-            ''' set up callback for db open '''
-            db.security.role['anonymous'].permissions=[]
-            anonid = db.user.lookup('anonymous')
-            db.user.set(anonid, roles='Anonymous')
-        self.instance.schema_hook = hook
+        self.db.security.role['anonymous'].permissions=[]
+        anonid = self.db.user.lookup('anonymous')
+        self.db.user.set(anonid, roles='Anonymous')
         try:
             self._handle_mail(message)
         except Unauthorized, value:
@@ -1034,23 +1755,17 @@
 
 Unknown address: fubar at bork.bork.bork
 """)
-
             assert not body_diff, body_diff
-
         else:
             raise AssertionError, "Unauthorized not raised when handling mail"
 
-
-        def hook (db, **kw):
-            ''' set up callback for db open '''
-            # Add Web Access role to anonymous, and try again to make sure
-            # we get a "please register at:" message this time.
-            p = [
-                db.security.getPermission('Create', 'user'),
-                db.security.getPermission('Web Access', None),
-            ]
-            db.security.role['anonymous'].permissions=p
-        self.instance.schema_hook = hook
+        # Add Web Access role to anonymous, and try again to make sure
+        # we get a "please register at:" message this time.
+        p = [
+            self.db.security.getPermission('Register', 'user'),
+            self.db.security.getPermission('Web Access', None),
+        ]
+        self.db.security.role['anonymous'].permissions=p
         try:
             self._handle_mail(message)
         except Unauthorized, value:
@@ -1063,9 +1778,7 @@
 
 Unknown address: fubar at bork.bork.bork
 """)
-
             assert not body_diff, body_diff
-
         else:
             raise AssertionError, "Unauthorized not raised when handling mail"
 
@@ -1074,21 +1787,18 @@
         m.sort()
         self.assertEqual(l, m)
 
-        def hook (db, **kw):
-            ''' set up callback for db open '''
-            # now with the permission
-            p = [
-                db.security.getPermission('Create', 'user'),
-                db.security.getPermission('Email Access', None),
-            ]
-            db.security.role['anonymous'].permissions=p
-        self.instance.schema_hook = hook
+        # now with the permission
+        p = [
+            self.db.security.getPermission('Register', 'user'),
+            self.db.security.getPermission('Email Access', None),
+        ]
+        self.db.security.role['anonymous'].permissions=p
         self._handle_mail(message)
         m = self.db.user.list()
         m.sort()
         self.assertNotEqual(l, m)
 
-    def testNewUserAuthorHighBit(self):
+    def testNewUserAuthorEncodedName(self):
         l = set(self.db.user.list())
         # From: name has Euro symbol in it
         message = '''Content-Type: text/plain;
@@ -1100,20 +1810,48 @@
 
 This is a test submission of a new issue.
 '''
-        def hook (db, **kw):
-            ''' set up callback for db open '''
-            p = [
-                db.security.getPermission('Create', 'user'),
-                db.security.getPermission('Email Access', None),
-            ]
-            db.security.role['anonymous'].permissions=p
-        self.instance.schema_hook = hook
+        p = [
+            self.db.security.getPermission('Register', 'user'),
+            self.db.security.getPermission('Email Access', None),
+            self.db.security.getPermission('Create', 'issue'),
+            self.db.security.getPermission('Create', 'msg'),
+        ]
+        self.db.security.role['anonymous'].permissions = p
         self._handle_mail(message)
         m = set(self.db.user.list())
         new = list(m - l)[0]
         name = self.db.user.get(new, 'realname')
         self.assertEquals(name, 'H€llo')
 
+    def testNewUserAuthorMixedEncodedName(self):
+        l = set(self.db.user.list())
+        # From: name has RFC 2047 encoded-word UTF-8 umlauts in it
+        message = '''Content-Type: text/plain;
+  charset="iso-8859-1"
+From: Firstname =?utf-8?b?w6TDtsOf?= Last <fubar at bork.bork.bork>
+To: issue_tracker at your.tracker.email.domain.example
+Message-Id: <dummy_test_message_id>
+Subject: [issue] Test =?utf-8?b?w4TDlsOc?= umlauts
+ X1
+ X2
+
+This is a test submission of a new issue.
+'''
+        p = [
+            self.db.security.getPermission('Register', 'user'),
+            self.db.security.getPermission('Email Access', None),
+            self.db.security.getPermission('Create', 'issue'),
+            self.db.security.getPermission('Create', 'msg'),
+        ]
+        self.db.security.role['anonymous'].permissions = p
+        self._handle_mail(message)
+        title = self.db.issue.get('1', 'title')
+        self.assertEquals(title, 'Test \xc3\x84\xc3\x96\xc3\x9c umlauts X1 X2')
+        m = set(self.db.user.list())
+        new = list(m - l)[0]
+        name = self.db.user.get(new, 'realname')
+        self.assertEquals(name, 'Firstname \xc3\xa4\xc3\xb6\xc3\x9f Last')
+
     def testUnknownUser(self):
         l = set(self.db.user.list())
         message = '''Content-Type: text/plain;
@@ -1125,8 +1863,7 @@
 
 This is a test submission of a new issue.
 '''
-        self.db.close()
-        handler = self.instance.MailGW(self.instance)
+        handler = self._create_mailgw(message)
         # we want a bounce message:
         handler.trapExceptions = 1
         ret = handler.main(StringIO(message))
@@ -1153,7 +1890,11 @@
 
 
 
-You are not a registered user.
+You are not a registered user. Please register at:
+
+http://tracker.example/cgi-bin/roundup.cgi/bugs/user?template=register
+
+...before sending mail to the tracker.
 
 Unknown address: nonexisting at bork.bork.bork
 
@@ -1175,6 +1916,9 @@
 ''')
 
     def testEnc01(self):
+        self.db.user.set(self.mary_id,
+            realname='\xe4\xf6\xfc\xc4\xd6\xdc\xdf, Mary'.decode
+            ('latin-1').encode('utf-8'))
         self.doNewIssue()
         self._handle_mail('''Content-Type: text/plain;
   charset="iso-8859-1"
@@ -1196,8 +1940,10 @@
 Content-Type: text/plain; charset="utf-8"
 Subject: [issue1] Testing...
 To: chef at bork.bork.bork, richard at test.test
-From: "Contrary, Mary" <issue_tracker at your.tracker.email.domain.example>
-Reply-To: Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+From: =?utf-8?b?w6TDtsO8w4TDlsOcw58sIE1hcnk=?=
+ <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
 MIME-Version: 1.0
 Message-Id: <followup_dummy_id>
 In-Reply-To: <dummy_test_message_id>
@@ -1207,7 +1953,8 @@
 Content-Transfer-Encoding: quoted-printable
 
 
-Contrary, Mary <mary at test.test> added the comment:
+=C3=A4=C3=B6=C3=BC=C3=84=C3=96=C3=9C=C3=9F, Mary <mary at test.test> added the=
+ comment:
 
 A message with encoding (encoded oe =C3=B6)
 
@@ -1244,7 +1991,8 @@
 Subject: [issue1] Testing...
 To: chef at bork.bork.bork, richard at test.test
 From: "Contrary, Mary" <issue_tracker at your.tracker.email.domain.example>
-Reply-To: Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
 MIME-Version: 1.0
 Message-Id: <followup_dummy_id>
 In-Reply-To: <dummy_test_message_id>
@@ -1298,7 +2046,8 @@
 Subject: [issue1] Testing...
 To: chef at bork.bork.bork, richard at test.test
 From: "Contrary, Mary" <issue_tracker at your.tracker.email.domain.example>
-Reply-To: Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
 MIME-Version: 1.0
 Message-Id: <followup_dummy_id>
 In-Reply-To: <dummy_test_message_id>
@@ -1375,7 +2124,8 @@
 Subject: [issue1] Testing...
 To: chef at bork.bork.bork
 From: richard <issue_tracker at your.tracker.email.domain.example>
-Reply-To: Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
 MIME-Version: 1.0
 Message-Id: <followup_dummy_id>
 In-Reply-To: <dummy_test_message_id>
@@ -1877,6 +2627,22 @@
         assert not os.path.exists(SENDMAILDEBUG)
         self.assertEqual(self.db.keyword.get('1', 'name'), 'Bar')
 
+    def testOneCharSubject(self):
+        message = '''Content-Type: text/plain;
+  charset="iso-8859-1"
+From: Chef <chef at bork.bork.bork>
+To: issue_tracker at your.tracker.email.domain.example
+Subject: b
+Cc: richard at test.test
+Reply-To: chef at bork.bork.bork
+Message-Id: <dummy_test_message_id>
+
+'''
+        try:
+            self._handle_mail(message)
+        except MailUsageError:
+            self.fail('MailUsageError raised')
+
     def testIssueidLast(self):
         nodeid1 = self.doNewIssue()
         nodeid2 = self._handle_mail('''Content-Type: text/plain;
@@ -1893,6 +2659,290 @@
         assert nodeid1 == nodeid2
         self.assertEqual(self.db.issue.get(nodeid2, 'title'), "Testing...")
 
+    def testSecurityMessagePermissionContent(self):
+        id = self.doNewIssue()
+        issue = self.db.issue.getnode (id)
+        self.db.security.addRole(name='Nomsg')
+        self.db.security.addPermissionToRole('Nomsg', 'Email Access')
+        for cl in 'issue', 'file', 'keyword':
+            for p in 'View', 'Edit', 'Create':
+                self.db.security.addPermissionToRole('Nomsg', p, cl)
+        self.db.user.set(self.mary_id, roles='Nomsg')
+        nodeid = self._handle_mail('''Content-Type: text/plain;
+  charset="iso-8859-1"
+From: Chef <chef at bork.bork.bork>
+To: issue_tracker at your.tracker.email.domain.example
+Message-Id: <dummy_test_message_id_2>
+Subject: [issue%(id)s] Testing... [nosy=+mary]
+
+Just a test reply
+'''%locals())
+        assert os.path.exists(SENDMAILDEBUG)
+        self.compareMessages(self._get_mail(),
+'''FROM: roundup-admin at your.tracker.email.domain.example
+TO: chef at bork.bork.bork, richard at test.test
+Content-Type: text/plain; charset="utf-8"
+Subject: [issue1] Testing...
+To: richard at test.test
+From: "Bork, Chef" <issue_tracker at your.tracker.email.domain.example>
+Reply-To: Roundup issue tracker
+ <issue_tracker at your.tracker.email.domain.example>
+MIME-Version: 1.0
+Message-Id: <dummy_test_message_id_2>
+In-Reply-To: <dummy_test_message_id>
+X-Roundup-Name: Roundup issue tracker
+X-Roundup-Loop: hello
+X-Roundup-Issue-Status: chatting
+Content-Transfer-Encoding: quoted-printable
+
+
+Bork, Chef <chef at bork.bork.bork> added the comment:
+
+Just a test reply
+
+----------
+nosy: +mary
+status: unread -> chatting
+
+_______________________________________________________________________
+Roundup issue tracker <issue_tracker at your.tracker.email.domain.example>
+<http://tracker.example/cgi-bin/roundup.cgi/bugs/issue1>
+_______________________________________________________________________
+''')
+
+    def testOutlookAttachment(self):
+        message = '''X-MimeOLE: Produced By Microsoft Exchange V6.5
+Content-class: urn:content-classes:message
+MIME-Version: 1.0
+Content-Type: multipart/mixed;
+	boundary="----_=_NextPart_001_01CACA65.40A51CBC"
+Subject: Example of a failed outlook attachment e-mail
+Date: Tue, 23 Mar 2010 01:43:44 -0700
+Message-ID: <CA37F17219784343816CA6613D2E339205E7D0F9 at nrcwstexb1.nrc.ca>
+X-MS-Has-Attach: yes
+X-MS-TNEF-Correlator: 
+Thread-Topic: Example of a failed outlook attachment e-mail
+Thread-Index: AcrKJo/t3pUBBwTpSwWNE3LE67UBDQ==
+From: "Hugh" <richard at test.test>
+To: <richard at test.test>
+X-OriginalArrivalTime: 23 Mar 2010 08:45:57.0350 (UTC) FILETIME=[41893860:01CACA65]
+
+This is a multi-part message in MIME format.
+
+------_=_NextPart_001_01CACA65.40A51CBC
+Content-Type: multipart/alternative;
+	boundary="----_=_NextPart_002_01CACA65.40A51CBC"
+
+
+------_=_NextPart_002_01CACA65.40A51CBC
+Content-Type: text/plain;
+	charset="us-ascii"
+Content-Transfer-Encoding: quoted-printable
+
+
+Hi Richard,
+
+I suppose this isn't the exact message that was sent but is a resend of
+one of my trial messages that failed.  For your benefit I changed the
+subject line and am adding these words to the message body.  Should
+still be as problematic, but if you like I can resend an exact copy of a
+failed message changing nothing except putting your address instead of
+our tracker.
+
+Thanks very much for taking time to look into this.  Much appreciated.
+
+ <<battery backup>>=20
+
+------_=_NextPart_002_01CACA65.40A51CBC
+Content-Type: text/html;
+	charset="us-ascii"
+Content-Transfer-Encoding: quoted-printable
+
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2//EN">
+<HTML>
+<HEAD>
+<META HTTP-EQUIV=3D"Content-Type" CONTENT=3D"text/html; =
+charset=3Dus-ascii">
+<META NAME=3D"Generator" CONTENT=3D"MS Exchange Server version =
+6.5.7654.12">
+<TITLE>Example of a failed outlook attachment e-mail</TITLE>
+</HEAD>
+<BODY>
+<!-- Converted from text/rtf format -->
+<BR>
+
+<P><FONT SIZE=3D2 FACE=3D"Arial">Hi Richard,</FONT>
+</P>
+
+<P><FONT SIZE=3D2 FACE=3D"Arial">I suppose this isn't the exact message =
+that was sent but is a resend of one of my trial messages that =
+failed.&nbsp; For your benefit I changed the subject line and am adding =
+these words to the message body.&nbsp; Should still be as problematic, =
+but if you like I can resend an exact copy of a failed message changing =
+nothing except putting your address instead of our tracker.</FONT></P>
+
+<P><FONT SIZE=3D2 FACE=3D"Arial">Thanks very much for taking time to =
+look into this.&nbsp; Much appreciated.</FONT>
+</P>
+<BR>
+
+<P><FONT FACE=3D"Arial" SIZE=3D2 COLOR=3D"#000000"> &lt;&lt;battery =
+backup&gt;&gt; </FONT>
+</P>
+
+</BODY>
+</HTML>
+------_=_NextPart_002_01CACA65.40A51CBC--
+
+------_=_NextPart_001_01CACA65.40A51CBC
+Content-Type: message/rfc822
+Content-Transfer-Encoding: 7bit
+
+X-MimeOLE: Produced By Microsoft Exchange V6.5
+MIME-Version: 1.0
+Content-Type: multipart/alternative;
+	boundary="----_=_NextPart_003_01CAC15A.29717800"
+X-OriginalArrivalTime: 11 Mar 2010 20:33:51.0249 (UTC) FILETIME=[28FEE010:01CAC15A]
+Content-class: urn:content-classes:message
+Subject: battery backup
+Date: Thu, 11 Mar 2010 13:33:43 -0700
+Message-ID: <p06240809c7bf02f9624c@[128.114.22.203]>
+X-MS-Has-Attach: 
+X-MS-TNEF-Correlator: 
+Thread-Topic: battery backup
+Thread-Index: AcrBWimtulTrSvBdQ2CcfZ8lyQdxmQ==
+From: "Jerry" <jerry at test.test>
+To: "Hugh" <hugh at test.test>
+
+This is a multi-part message in MIME format.
+
+------_=_NextPart_003_01CAC15A.29717800
+Content-Type: text/plain;
+	charset="iso-8859-1"
+Content-Transfer-Encoding: quoted-printable
+
+Dear Hugh,
+	A car batter has an energy capacity of ~ 500Wh.  A UPS=20
+battery is worse than this.
+
+if we need to provied 100kW for 30 minutes that will take 100 car=20
+batteries.  This seems like an awful lot of batteries.
+
+Of course I like your idea of making the time 1 minute, so we get to=20
+a more modest number of batteries
+
+Jerry
+
+
+------_=_NextPart_003_01CAC15A.29717800
+Content-Type: text/html;
+	charset="iso-8859-1"
+Content-Transfer-Encoding: quoted-printable
+
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2//EN">
+<HTML>
+<HEAD>
+<META HTTP-EQUIV=3D"Content-Type" CONTENT=3D"text/html; =
+charset=3Diso-8859-1">
+<META NAME=3D"Generator" CONTENT=3D"MS Exchange Server version =
+6.5.7654.12">
+<TITLE>battery backup</TITLE>
+</HEAD>
+<BODY>
+<!-- Converted from text/plain format -->
+
+<P><FONT SIZE=3D2>Dear Hugh,</FONT>
+
+<BR>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; <FONT SIZE=3D2>A car =
+batter has an energy capacity of ~ 500Wh.&nbsp; A UPS </FONT>
+
+<BR><FONT SIZE=3D2>battery is worse than this.</FONT>
+</P>
+
+<P><FONT SIZE=3D2>if we need to provied 100kW for 30 minutes that will =
+take 100 car </FONT>
+
+<BR><FONT SIZE=3D2>batteries.&nbsp; This seems like an awful lot of =
+batteries.</FONT>
+</P>
+
+<P><FONT SIZE=3D2>Of course I like your idea of making the time 1 =
+minute, so we get to </FONT>
+
+<BR><FONT SIZE=3D2>a more modest number of batteries</FONT>
+</P>
+
+<P><FONT SIZE=3D2>Jerry</FONT>
+</P>
+
+</BODY>
+</HTML>
+------_=_NextPart_003_01CAC15A.29717800--
+
+------_=_NextPart_001_01CACA65.40A51CBC--
+'''
+        nodeid = self._handle_mail(message)
+        assert not os.path.exists(SENDMAILDEBUG)
+        msgid = self.db.issue.get(nodeid, 'messages')[0]
+        self.assert_(self.db.msg.get(msgid, 'content').startswith('Hi Richard'))
+        self.assertEqual(self.db.msg.get(msgid, 'files'), ['1', '2'])
+        fileid = self.db.msg.get(msgid, 'files')[0]
+        self.assertEqual(self.db.file.get(fileid, 'type'), 'text/html')
+        fileid = self.db.msg.get(msgid, 'files')[1]
+        self.assertEqual(self.db.file.get(fileid, 'type'), 'message/rfc822')
+
+    def testForwardedMessageAttachment(self):
+        message = '''Return-Path: <rgg at test.test>
+Received: from localhost(127.0.0.1), claiming to be "[115.130.26.69]"
+via SMTP by localhost, id smtpdAAApLaWrq; Tue Apr 13 23:10:05 2010
+Message-ID: <4BC4F9C7.50409 at test.test>
+Date: Wed, 14 Apr 2010 09:09:59 +1000
+From: Rupert Goldie <rgg at test.test>
+User-Agent: Thunderbird 2.0.0.24 (Windows/20100228)
+MIME-Version: 1.0
+To: ekit issues <issues at test.test>
+Subject: [Fwd: PHP ERROR (fb)] post limit reached
+Content-Type: multipart/mixed; boundary="------------000807090608060304010403"
+
+This is a multi-part message in MIME format.
+--------------000807090608060304010403
+Content-Type: text/plain; charset=ISO-8859-1; format=flowed
+Content-Transfer-Encoding: 7bit
+
+Catch this exception and log it without emailing.
+
+--------------000807090608060304010403
+Content-Type: message/rfc822; name="PHP ERROR (fb).eml"
+Content-Transfer-Encoding: 7bit
+Content-Disposition: inline; filename="PHP ERROR (fb).eml"
+
+Return-Path: <ektravj at test.test>
+X-Sieve: CMU Sieve 2.2
+via SMTP by crown.off.ekorp.com, id smtpdAAA1JaW1o; Tue Apr 13 23:01:04 2010
+X-Virus-Scanned: by amavisd-new at ekit.com
+To: facebook-errors at test.test
+From: ektravj at test.test
+Subject: PHP ERROR (fb)
+Message-Id: <20100413230100.D601D27E84 at mail2.elax3.ekorp.com>
+Date: Tue, 13 Apr 2010 23:01:00 +0000 (UTC)
+
+[13-Apr-2010 22:49:02] PHP Fatal error:  Uncaught exception 'Exception' with message 'Facebook Error Message: Feed action request limit reached' in /app/01/www/virtual/fb.ekit.com/htdocs/includes/functions.php:280
+Stack trace:
+#0 /app/01/www/virtual/fb.ekit.com/htdocs/gateway/ekit/feed/index.php(178): fb_exceptions(Object(FacebookRestClientException))
+#1 {main}
+ thrown in /app/01/www/virtual/fb.ekit.com/htdocs/includes/functions.php on line 280
+
+
+--------------000807090608060304010403--
+'''
+        nodeid = self._handle_mail(message)
+        assert not os.path.exists(SENDMAILDEBUG)
+        msgid = self.db.issue.get(nodeid, 'messages')[0]
+        self.assertEqual(self.db.msg.get(msgid, 'content'),
+            'Catch this exception and log it without emailing.')
+        self.assertEqual(self.db.msg.get(msgid, 'files'), ['1'])
+        fileid = self.db.msg.get(msgid, 'files')[0]
+        self.assertEqual(self.db.file.get(fileid, 'type'), 'message/rfc822')
 
 def test_suite():
     suite = unittest.TestSuite()
@@ -1904,3 +2954,7 @@
     unittest.main(testRunner=runner)
 
 # vim: set filetype=python sts=4 sw=4 et si :
+
+
+
+

Added: tracker/roundup-src/test/test_memorydb.py
==============================================================================
--- (empty file)
+++ tracker/roundup-src/test/test_memorydb.py	Thu Aug  4 15:46:52 2011
@@ -0,0 +1,71 @@
+# $Id: test_memorydb.py,v 1.4 2004-11-03 01:34:21 richard Exp $ 
+
+import unittest, os, shutil, time
+
+from roundup import hyperdb
+
+from db_test_base import DBTest, ROTest, SchemaTest, config, setupSchema
+import memorydb
+
+class memorydbOpener:
+    module = memorydb
+
+    def nuke_database(self):
+        # really kill it
+        self.db = None
+
+    db = None
+    def open_database(self):
+        if self.db is None:
+            self.db = self.module.Database(config, 'admin')
+        return self.db
+
+    def setUp(self):
+        self.open_database()
+        setupSchema(self.db, 1, self.module)
+
+    def tearDown(self):
+        if self.db is not None:
+            self.db.close()
+
+    # nuke and re-create db for restore
+    def nukeAndCreate(self):
+        self.db.close()
+        self.nuke_database()
+        self.db = self.module.Database(config, 'admin')
+        setupSchema(self.db, 0, self.module)
+
+class memorydbDBTest(memorydbOpener, DBTest):
+    pass
+
+class memorydbROTest(memorydbOpener, ROTest):
+    def setUp(self):
+        self.db = self.module.Database(config)
+        setupSchema(self.db, 0, self.module)
+
+class memorydbSchemaTest(memorydbOpener, SchemaTest):
+    pass
+
+from session_common import DBMTest
+class memorydbSessionTest(memorydbOpener, DBMTest):
+    def setUp(self):
+        self.db = self.module.Database(config, 'admin')
+        setupSchema(self.db, 1, self.module)
+        self.sessions = self.db.sessions
+
+def test_suite():
+    suite = unittest.TestSuite()
+    print 'Including memorydb tests'
+    suite.addTest(unittest.makeSuite(memorydbDBTest))
+    suite.addTest(unittest.makeSuite(memorydbROTest))
+    suite.addTest(unittest.makeSuite(memorydbSchemaTest))
+    suite.addTest(unittest.makeSuite(memorydbSessionTest))
+    return suite
+
+if __name__ == '__main__':
+    runner = unittest.TextTestRunner()
+    unittest.main(testRunner=runner)
+
+
+# vim: set filetype=python ts=4 sw=4 et si
+

Modified: tracker/roundup-src/test/test_multipart.py
==============================================================================
--- tracker/roundup-src/test/test_multipart.py	(original)
+++ tracker/roundup-src/test/test_multipart.py	Thu Aug  4 15:46:52 2011
@@ -23,13 +23,25 @@
 from roundup.mailgw import Message
 
 class TestMessage(Message):
+    # A note on message/rfc822: The content of such an attachment is an
+    # email with at least one header line. RFC2046 tells us: """   A
+    # media type of "message/rfc822" indicates that the body contains an
+    # encapsulated message, with the syntax of an RFC 822 message.
+    # However, unlike top-level RFC 822 messages, the restriction that
+    # each "message/rfc822" body must include a "From", "Date", and at
+    # least one destination header is removed and replaced with the
+    # requirement that at least one of "From", "Subject", or "Date" must
+    # be present."""
+    # This means we have to add a newline after the mime-header before
+    # the subject, otherwise the subject is part of the mime header not
+    # part of the email header.
     table = {'multipart/signed': '    boundary="boundary-%(indent)s";\n',
              'multipart/mixed': '    boundary="boundary-%(indent)s";\n',
              'multipart/alternative': '    boundary="boundary-%(indent)s";\n',
              'text/plain': '    name="foo.txt"\nfoo\n',
              'application/pgp-signature': '    name="foo.gpg"\nfoo\n',
              'application/pdf': '    name="foo.pdf"\nfoo\n',
-             'message/rfc822': 'Subject: foo\n\nfoo\n'}
+             'message/rfc822': '\nSubject: foo\n\nfoo\n'}
 
     def __init__(self, spec):
         """Create a basic MIME message according to 'spec'.
@@ -215,7 +227,7 @@
 multipart/mixed
     message/rfc822""",
                   (None,
-                   [('foo', 'message/rfc822', 'foo\n')]))
+                   [('foo.eml', 'message/rfc822', 'Subject: foo\n\nfoo\n')]))
 
 def test_suite():
     suite = unittest.TestSuite()

Modified: tracker/roundup-src/test/test_mysql.py
==============================================================================
--- tracker/roundup-src/test/test_mysql.py	(original)
+++ tracker/roundup-src/test/test_mysql.py	Thu Aug  4 15:46:52 2011
@@ -23,6 +23,7 @@
 from roundup.backends import get_backend, have_backend
 
 from db_test_base import DBTest, ROTest, config, SchemaTest, ClassicInitTest
+from db_test_base import ConcurrentDBTest, FilterCacheTest
 
 
 class mysqlOpener:
@@ -63,6 +64,24 @@
         ClassicInitTest.tearDown(self)
         self.nuke_database()
 
+class mysqlConcurrencyTest(mysqlOpener, ConcurrentDBTest):
+    backend = 'mysql'
+    def setUp(self):
+        mysqlOpener.setUp(self)
+        ConcurrentDBTest.setUp(self)
+    def tearDown(self):
+        ConcurrentDBTest.tearDown(self)
+        self.nuke_database()
+
+class mysqlFilterCacheTest(mysqlOpener, FilterCacheTest):
+    backend = 'mysql'
+    def setUp(self):
+        mysqlOpener.setUp(self)
+        FilterCacheTest.setUp(self)
+    def tearDown(self):
+        FilterCacheTest.tearDown(self)
+        self.nuke_database()
+
 from session_common import RDBMSTest
 class mysqlSessionTest(mysqlOpener, RDBMSTest):
     def setUp(self):
@@ -92,6 +111,8 @@
         suite.addTest(unittest.makeSuite(mysqlSchemaTest))
         suite.addTest(unittest.makeSuite(mysqlClassicInitTest))
         suite.addTest(unittest.makeSuite(mysqlSessionTest))
+        suite.addTest(unittest.makeSuite(mysqlConcurrencyTest))
+        suite.addTest(unittest.makeSuite(mysqlFilterCacheTest))
     return suite
 
 if __name__ == '__main__':

Modified: tracker/roundup-src/test/test_postgresql.py
==============================================================================
--- tracker/roundup-src/test/test_postgresql.py	(original)
+++ tracker/roundup-src/test/test_postgresql.py	Thu Aug  4 15:46:52 2011
@@ -22,6 +22,7 @@
 from roundup.hyperdb import DatabaseError
 
 from db_test_base import DBTest, ROTest, config, SchemaTest, ClassicInitTest
+from db_test_base import ConcurrentDBTest, FilterCacheTest
 
 from roundup.backends import get_backend, have_backend
 
@@ -57,6 +58,26 @@
         ROTest.tearDown(self)
         postgresqlOpener.tearDown(self)
 
+class postgresqlConcurrencyTest(postgresqlOpener, ConcurrentDBTest):
+    backend = 'postgresql'
+    def setUp(self):
+        postgresqlOpener.setUp(self)
+        ConcurrentDBTest.setUp(self)
+
+    def tearDown(self):
+        ConcurrentDBTest.tearDown(self)
+        postgresqlOpener.tearDown(self)
+
+class postgresqlFilterCacheTest(postgresqlOpener, FilterCacheTest):
+    backend = 'postgresql'
+    def setUp(self):
+        postgresqlOpener.setUp(self)
+        FilterCacheTest.setUp(self)
+
+    def tearDown(self):
+        FilterCacheTest.tearDown(self)
+        postgresqlOpener.tearDown(self)
+
 class postgresqlSchemaTest(postgresqlOpener, SchemaTest):
     def setUp(self):
         postgresqlOpener.setUp(self)
@@ -102,6 +123,8 @@
     suite.addTest(unittest.makeSuite(postgresqlSchemaTest))
     suite.addTest(unittest.makeSuite(postgresqlClassicInitTest))
     suite.addTest(unittest.makeSuite(postgresqlSessionTest))
+    suite.addTest(unittest.makeSuite(postgresqlConcurrencyTest))
+    suite.addTest(unittest.makeSuite(postgresqlFilterCacheTest))
     return suite
 
 # vim: set et sts=4 sw=4 :

Modified: tracker/roundup-src/test/test_security.py
==============================================================================
--- tracker/roundup-src/test/test_security.py	(original)
+++ tracker/roundup-src/test/test_security.py	Thu Aug  4 15:46:52 2011
@@ -23,7 +23,7 @@
 import os, unittest, shutil
 
 from roundup import backends
-from roundup.password import Password
+import roundup.password
 from db_test_base import setupSchema, MyTestCase, config
 
 class PermissionTest(MyTestCase):
@@ -178,6 +178,65 @@
         self.assertEquals(has('Test', none, 'test', itemid='1'), 0)
         self.assertEquals(has('Test', none, 'test', itemid='2'), 0)
 
+    def testTransitiveSearchPermissions(self):
+        add = self.db.security.addPermission
+        has = self.db.security.hasSearchPermission
+        addRole = self.db.security.addRole
+        addToRole = self.db.security.addPermissionToRole
+        addRole(name='User')
+        addRole(name='Anonymous')
+        addRole(name='Issue')
+        addRole(name='Msg')
+        addRole(name='UV')
+        user = self.db.user.create(username='user1', roles='User')
+        anon = self.db.user.create(username='anonymous', roles='Anonymous')
+        ui = self.db.user.create(username='user2', roles='Issue')
+        uim = self.db.user.create(username='user3', roles='Issue,Msg')
+        uimu = self.db.user.create(username='user4', roles='Issue,Msg,UV')
+        iv = add(name="View", klass="issue")
+        addToRole('User', iv)
+        addToRole('Anonymous', iv)
+        addToRole('Issue', iv)
+        ms = add(name="Search", klass="msg")
+        addToRole('User', ms)
+        addToRole('Anonymous', ms)
+        addToRole('Msg', ms)
+        uv = add(name="View", klass="user")
+        addToRole('User', uv)
+        addToRole('UV', uv)
+        self.assertEquals(has(anon, 'issue', 'messages'), 1)
+        self.assertEquals(has(anon, 'issue', 'messages.author'), 0)
+        self.assertEquals(has(anon, 'issue', 'messages.author.username'), 0)
+        self.assertEquals(has(anon, 'issue', 'messages.recipients'), 0)
+        self.assertEquals(has(anon, 'issue', 'messages.recipients.username'), 0)
+        self.assertEquals(has(user, 'issue', 'messages'), 1)
+        self.assertEquals(has(user, 'issue', 'messages.author'), 1)
+        self.assertEquals(has(user, 'issue', 'messages.author.username'), 1)
+        self.assertEquals(has(user, 'issue', 'messages.recipients'), 1)
+        self.assertEquals(has(user, 'issue', 'messages.recipients.username'), 1)
+
+        self.assertEquals(has(ui, 'issue', 'messages'), 0)
+        self.assertEquals(has(ui, 'issue', 'messages.author'), 0)
+        self.assertEquals(has(ui, 'issue', 'messages.author.username'), 0)
+        self.assertEquals(has(ui, 'issue', 'messages.recipients'), 0)
+        self.assertEquals(has(ui, 'issue', 'messages.recipients.username'), 0)
+
+        self.assertEquals(has(uim, 'issue', 'messages'), 1)
+        self.assertEquals(has(uim, 'issue', 'messages.author'), 0)
+        self.assertEquals(has(uim, 'issue', 'messages.author.username'), 0)
+        self.assertEquals(has(uim, 'issue', 'messages.recipients'), 0)
+        self.assertEquals(has(uim, 'issue', 'messages.recipients.username'), 0)
+
+        self.assertEquals(has(uimu, 'issue', 'messages'), 1)
+        self.assertEquals(has(uimu, 'issue', 'messages.author'), 1)
+        self.assertEquals(has(uimu, 'issue', 'messages.author.username'), 1)
+        self.assertEquals(has(uimu, 'issue', 'messages.recipients'), 1)
+        self.assertEquals(has(uimu, 'issue', 'messages.recipients.username'), 1)
+
+    # roundup.password has its own built-in test, call it.
+    def test_password(self):
+        roundup.password.test()
+
 def test_suite():
     suite = unittest.TestSuite()
     suite.addTest(unittest.makeSuite(PermissionTest))

Modified: tracker/roundup-src/test/test_sqlite.py
==============================================================================
--- tracker/roundup-src/test/test_sqlite.py	(original)
+++ tracker/roundup-src/test/test_sqlite.py	Thu Aug  4 15:46:52 2011
@@ -21,6 +21,7 @@
 from roundup.backends import get_backend, have_backend
 
 from db_test_base import DBTest, ROTest, SchemaTest, ClassicInitTest, config
+from db_test_base import ConcurrentDBTest, FilterCacheTest
 
 class sqliteOpener:
     if have_backend('sqlite'):
@@ -41,6 +42,12 @@
 class sqliteClassicInitTest(ClassicInitTest):
     backend = 'sqlite'
 
+class sqliteConcurrencyTest(ConcurrentDBTest):
+    backend = 'sqlite'
+
+class sqliteFilterCacheTest(sqliteOpener, FilterCacheTest):
+    backend = 'sqlite'
+
 from session_common import RDBMSTest
 class sqliteSessionTest(sqliteOpener, RDBMSTest):
     pass
@@ -57,6 +64,8 @@
     suite.addTest(unittest.makeSuite(sqliteSchemaTest))
     suite.addTest(unittest.makeSuite(sqliteClassicInitTest))
     suite.addTest(unittest.makeSuite(sqliteSessionTest))
+    suite.addTest(unittest.makeSuite(sqliteConcurrencyTest))
+    suite.addTest(unittest.makeSuite(sqliteFilterCacheTest))
     return suite
 
 if __name__ == '__main__':

Modified: tracker/roundup-src/test/test_templating.py
==============================================================================
--- tracker/roundup-src/test/test_templating.py	(original)
+++ tracker/roundup-src/test/test_templating.py	Thu Aug  4 15:46:52 2011
@@ -147,6 +147,7 @@
         p = StringHTMLProperty(self.client, 'test', '1', None, 'test', '')
         def t(s): return p.hyper_re.sub(p._hyper_repl, s)
         ae = self.assertEqual
+        ae(t('item123123123123'), 'item123123123123')
         ae(t('http://roundup.net/'),
            '<a href="http://roundup.net/">http://roundup.net/</a>')
         ae(t('&lt;HTTP://roundup.net/&gt;'),

Modified: tracker/roundup-src/test/test_xmlrpc.py
==============================================================================
--- tracker/roundup-src/test/test_xmlrpc.py	(original)
+++ tracker/roundup-src/test/test_xmlrpc.py	Thu Aug  4 15:46:52 2011
@@ -115,6 +115,88 @@
         finally:
             self.db.setCurrentUser('joe')
 
+    def testAuthFilter(self):
+        # this checks if we properly check for search permissions
+        self.db.security.permissions = {}
+        self.db.security.addRole(name='User')
+        self.db.security.addRole(name='Project')
+        self.db.security.addPermissionToRole('User', 'Web Access')
+        self.db.security.addPermissionToRole('Project', 'Web Access')
+        # Allow viewing keyword
+        p = self.db.security.addPermission(name='View', klass='keyword')
+        self.db.security.addPermissionToRole('User', p)
+        # Allow viewing interesting things (but not keyword) on issue
+        # But users might only view issues where they are on nosy
+        # (so in the real world the check method would be better)
+        p = self.db.security.addPermission(name='View', klass='issue',
+            properties=("title", "status"), check=lambda x,y,z: True)
+        self.db.security.addPermissionToRole('User', p)
+        # Allow role "Project" access to whole issue
+        p = self.db.security.addPermission(name='View', klass='issue')
+        self.db.security.addPermissionToRole('Project', p)
+        # Allow all access to status:
+        p = self.db.security.addPermission(name='View', klass='status')
+        self.db.security.addPermissionToRole('User', p)
+        self.db.security.addPermissionToRole('Project', p)
+
+        keyword = self.db.keyword
+        status = self.db.status
+        issue = self.db.issue
+
+        d1 = keyword.create(name='d1')
+        d2 = keyword.create(name='d2')
+        open = status.create(name='open')
+        closed = status.create(name='closed')
+        issue.create(title='i1', status=open, keyword=[d2])
+        issue.create(title='i2', status=open, keyword=[d1])
+        issue.create(title='i3', status=closed, keyword=[d1])
+
+        chef = self.db.user.create(username = 'chef', roles='User, Project')
+        joe  = self.db.user.lookup('joe')
+
+        # Conditionally allow view of whole issue (check is False here,
+        # this might check for keyword owner in the real world)
+        p = self.db.security.addPermission(name='View', klass='issue',
+            check=lambda x,y,z: False)
+        self.db.security.addPermissionToRole('User', p)
+        # Allow user to search for issue.status
+        p = self.db.security.addPermission(name='Search', klass='issue',
+            properties=("status",))
+        self.db.security.addPermissionToRole('User', p)
+
+        keyw = {'keyword':self.db.keyword.lookup('d1')}
+        stat = {'status':self.db.status.lookup('open')}
+        keygroup = keysort = [('+', 'keyword')]
+        self.db.commit()
+
+        # Filter on keyword ignored for role 'User':
+        r = self.server.filter('issue', None, keyw)
+        self.assertEqual(r, ['1', '2', '3'])
+        # Filter on status works for all:
+        r = self.server.filter('issue', None, stat)
+        self.assertEqual(r, ['1', '2'])
+        # Sorting and grouping for class User fails:
+        r = self.server.filter('issue', None, {}, sort=keysort)
+        self.assertEqual(r, ['1', '2', '3'])
+        r = self.server.filter('issue', None, {}, group=keygroup)
+        self.assertEqual(r, ['1', '2', '3'])
+
+        self.db.close()
+        self.db = self.instance.open('chef')
+        self.server = RoundupInstance(self.db, self.instance.actions, None)
+
+        # Filter on keyword works for role 'Project':
+        r = self.server.filter('issue', None, keyw)
+        self.assertEqual(r, ['2', '3'])
+        # Filter on status works for all:
+        r = self.server.filter('issue', None, stat)
+        self.assertEqual(r, ['1', '2'])
+        # Sorting and grouping for class Project works:
+        r = self.server.filter('issue', None, {}, sort=keysort)
+        self.assertEqual(r, ['2', '3', '1'])
+        r = self.server.filter('issue', None, {}, group=keygroup)
+        self.assertEqual(r, ['2', '3', '1'])
+
 def test_suite():
     suite = unittest.TestSuite()
     for l in list_backends():


More information about the Python-checkins mailing list