[Python-checkins] distutils2: merged tarek's changes

tarek.ziade python-checkins at python.org
Sun Jul 4 11:48:39 CEST 2010


tarek.ziade pushed ec619c341cd2 to distutils2:

http://hg.python.org/distutils2/rev/ec619c341cd2
changeset:   279:ec619c341cd2
parent:      278:03bc7496c3b2
parent:      188:b7fd258b33e7
user:        Konrad Delong <konryd at gmail.com>
date:        Tue Jun 01 11:23:39 2010 -0400
summary:     merged tarek's changes
files:       src/distutils2/filelist.py, src/distutils2/tests/test_filelist.py, src/distutils2/tests/test_text_file.py, src/distutils2/text_file.py

diff --git a/.hgignore b/.hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -1,3 +1,5 @@
 .*\.pyc$
 .*\.pyo$
 ^docs/build
+.*\.swp$
+
diff --git a/src/CONTRIBUTORS.txt b/src/CONTRIBUTORS.txt
new file mode 100644
--- /dev/null
+++ b/src/CONTRIBUTORS.txt
@@ -0,0 +1,25 @@
+============
+Contributors
+============
+
+Distutils2 is a project that was started and that is maintained by
+Tarek Ziadé, and many people are contributing to the project.
+
+If you did, please add your name below in alphabetical order!
+
+Thanks to:
+
+- Pior Bastida
+- Titus Brown
+- Nicolas Cadou
+- Josip Djolonga
+- Yannick Gringas
+- Carl Meyer
+- Michael Mulich
+- George Peris
+- Sean Reifschneider
+- Erik Rose
+- Brian Rosner
+- Alexandre Vassalotti
+- Martin von Löwis
+
diff --git a/src/DEVNOTES.txt b/src/DEVNOTES.txt
new file mode 100644
--- /dev/null
+++ b/src/DEVNOTES.txt
@@ -0,0 +1,10 @@
+Notes for developers
+====================
+
+- Distutils2 runs from 2.4 to 3.2 (3.x not implemented yet), so
+  make sure you don't use a syntax that doesn't work under
+  a specific Python version.
+
+- Always run tests.sh before you push a change. This implies
+  that you have all Python versions installed.
+
diff --git a/src/distutils2/_backport/pkgutil.py b/src/distutils2/_backport/pkgutil.py
--- a/src/distutils2/_backport/pkgutil.py
+++ b/src/distutils2/_backport/pkgutil.py
@@ -11,16 +11,18 @@
 from types import ModuleType
 from distutils2.errors import DistutilsError
 from distutils2.metadata import DistributionMetadata
-from distutils2.version import suggest_normalized_version
+from distutils2.version import suggest_normalized_version, VersionPredicate
 
 __all__ = [
     'get_importer', 'iter_importers', 'get_loader', 'find_loader',
     'walk_packages', 'iter_modules',
     'ImpImporter', 'ImpLoader', 'read_code', 'extend_path',
-    'Distribution', 'distinfo_dirname', 'get_distributions',
-    'get_distribution', 'get_file_users', 
+    'Distribution', 'EggInfoDistribution', 'distinfo_dirname',
+    'get_distributions', 'get_distribution', 'get_file_users',
+    'provides_distribution', 'obsoletes_distribution',
 ]
 
+
 def read_code(stream):
     # This helper is needed in order for the PEP 302 emulation to
     # correctly handle compiled files
@@ -37,6 +39,7 @@
 def simplegeneric(func):
     """Make a trivial single-dispatch generic function"""
     registry = {}
+
     def wrapper(*args, **kw):
         ob = args[0]
         try:
@@ -47,6 +50,7 @@
             mro = cls.__mro__
         except AttributeError:
             try:
+
                 class cls(cls, object):
                     pass
                 mro = cls.__mro__[1:]
@@ -128,7 +132,7 @@
                 # don't traverse path items we've seen before
                 path = [p for p in path if not seen(p)]
 
-                for item in walk_packages(path, name+'.', onerror):
+                for item in walk_packages(path, name + '.', onerror):
                     yield item
 
 
@@ -206,7 +210,7 @@
 
         for fn in filenames:
             modname = inspect.getmodulename(fn)
-            if modname=='__init__' or modname in yielded:
+            if modname == '__init__' or modname in yielded:
                 continue
 
             path = os.path.join(self.path, fn)
@@ -216,7 +220,7 @@
                 modname = fn
                 for fn in os.listdir(path):
                     subname = inspect.getmodulename(fn)
-                    if subname=='__init__':
+                    if subname == '__init__':
                         ispkg = True
                         break
                 else:
@@ -255,7 +259,7 @@
     def _reopen(self):
         if self.file and self.file.closed:
             mod_type = self.etc[2]
-            if mod_type==imp.PY_SOURCE:
+            if mod_type == imp.PY_SOURCE:
                 self.file = open(self.filename, 'rU')
             elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION):
                 self.file = open(self.filename, 'rb')
@@ -270,22 +274,22 @@
 
     def is_package(self, fullname):
         fullname = self._fix_name(fullname)
-        return self.etc[2]==imp.PKG_DIRECTORY
+        return self.etc[2] == imp.PKG_DIRECTORY
 
     def get_code(self, fullname=None):
         fullname = self._fix_name(fullname)
         if self.code is None:
             mod_type = self.etc[2]
-            if mod_type==imp.PY_SOURCE:
+            if mod_type == imp.PY_SOURCE:
                 source = self.get_source(fullname)
                 self.code = compile(source, self.filename, 'exec')
-            elif mod_type==imp.PY_COMPILED:
+            elif mod_type == imp.PY_COMPILED:
                 self._reopen()
                 try:
                     self.code = read_code(self.file)
                 finally:
                     self.file.close()
-            elif mod_type==imp.PKG_DIRECTORY:
+            elif mod_type == imp.PKG_DIRECTORY:
                 self.code = self._get_delegate().get_code()
         return self.code
 
@@ -293,29 +297,28 @@
         fullname = self._fix_name(fullname)
         if self.source is None:
             mod_type = self.etc[2]
-            if mod_type==imp.PY_SOURCE:
+            if mod_type == imp.PY_SOURCE:
                 self._reopen()
                 try:
                     self.source = self.file.read()
                 finally:
                     self.file.close()
-            elif mod_type==imp.PY_COMPILED:
+            elif mod_type == imp.PY_COMPILED:
                 if os.path.exists(self.filename[:-1]):
                     f = open(self.filename[:-1], 'rU')
                     self.source = f.read()
                     f.close()
-            elif mod_type==imp.PKG_DIRECTORY:
+            elif mod_type == imp.PKG_DIRECTORY:
                 self.source = self._get_delegate().get_source()
         return self.source
 
-
     def _get_delegate(self):
         return ImpImporter(self.filename).find_module('__init__')
 
     def get_filename(self, fullname=None):
         fullname = self._fix_name(fullname)
         mod_type = self.etc[2]
-        if self.etc[2]==imp.PKG_DIRECTORY:
+        if self.etc[2] == imp.PKG_DIRECTORY:
             return self._get_delegate().get_filename()
         elif self.etc[2] in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
             return self.filename
@@ -339,16 +342,16 @@
 
             fn = fn[plen:].split(os.sep)
 
-            if len(fn)==2 and fn[1].startswith('__init__.py'):
+            if len(fn) == 2 and fn[1].startswith('__init__.py'):
                 if fn[0] not in yielded:
                     yielded[fn[0]] = 1
                     yield fn[0], True
 
-            if len(fn)!=1:
+            if len(fn) != 1:
                 continue
 
             modname = inspect.getmodulename(fn[0])
-            if modname=='__init__':
+            if modname == '__init__':
                 continue
 
             if modname and '.' not in modname and modname not in yielded:
@@ -436,6 +439,7 @@
     if '.' not in fullname:
         yield ImpImporter()
 
+
 def get_loader(module_or_name):
     """Get a PEP 302 "loader" object for module_or_name
 
@@ -461,6 +465,7 @@
         fullname = module_or_name
     return find_loader(fullname)
 
+
 def find_loader(fullname):
     """Find a PEP 302 "loader" object for fullname
 
@@ -551,6 +556,7 @@
 
     return path
 
+
 def get_data(package, resource):
     """Get a resource from a package.
 
@@ -594,6 +600,7 @@
 
 DIST_FILES = ('INSTALLER', 'METADATA', 'RECORD', 'REQUESTED',)
 
+
 class Distribution(object):
     """Created with the *path* of the ``.dist-info`` directory provided to the
     constructor. It reads the metadata contained in METADATA when it is
@@ -604,7 +611,7 @@
     name = ''
     """The name of the distribution."""
     metadata = None
-    """A :class:`distutils2.metadata.DistributionMetadata` instance loaded with 
+    """A :class:`distutils2.metadata.DistributionMetadata` instance loaded with
     the distribution's METADATA file."""
     requested = False
     """A boolean that indicates whether the REQUESTED metadata file is present
@@ -620,7 +627,7 @@
         RECORD = os.path.join(self.path, 'RECORD')
         record_reader = csv_reader(open(RECORD, 'rb'), delimiter=',')
         for row in record_reader:
-            path, md5, size = row[:] + [ None for i in xrange(len(row), 3) ]
+            path, md5, size = row[:] + [None for i in xrange(len(row), 3)]
             if local:
                 path = path.replace('/', os.sep)
                 path = os.path.join(sys.prefix, path)
@@ -635,7 +642,6 @@
 
         A local absolute path is an absolute path in which occurrences of
         ``'/'`` have been replaced by the system separator given by ``os.sep``.
-
         :parameter local: flag to say if the path should be returned a local
                           absolute path
         :type local: boolean
@@ -643,10 +649,9 @@
         """
         return self._get_records(local)
 
-
     def uses(self, path):
         """
-        Returns ``True`` if path is listed in RECORD. *path* can be a local 
+        Returns ``True`` if path is listed in RECORD. *path* can be a local
         absolute path or a relative ``'/'``-separated path.
 
         :rtype: boolean
@@ -663,8 +668,8 @@
         ``file`` instance for the file pointed by *path*.
 
         :parameter path: a ``'/'``-separated path relative to the ``.dist-info``
-                         directory or an absolute path; If *path* is an absolute 
-                         path and doesn't start with the ``.dist-info``
+                         directory or an absolute path; If *path* is an
+                         absolute path and doesn't start with the ``.dist-info``
                          directory path, a :class:`DistutilsError` is raised
         :type path: string
         :parameter binary: If *binary* is ``True``, opens the file in read-only
@@ -696,8 +701,8 @@
 
     def get_distinfo_files(self, local=False):
         """
-        Iterates over the RECORD entries and returns paths for each line if the 
-        path is pointing to a file located in the ``.dist-info`` directory or 
+        Iterates over the RECORD entries and returns paths for each line if the
+        path is pointing to a file located in the ``.dist-info`` directory or
         one of its subdirectories.
 
         :parameter local: If *local* is ``True``, each returned path is
@@ -710,16 +715,42 @@
             yield path
 
 
+class EggInfoDistribution(object):
+    """Created with the *path* of the ``.egg-info`` directory or file provided
+    to the constructor. It reads the metadata contained in the file itself, or
+    if the given path happens to be a directory, the metadata is read from the
+    file PKG-INFO under that directory."""
+
+    name = ''
+    """The name of the distribution."""
+    metadata = None
+    """A :class:`distutils2.metadata.DistributionMetadata` instance loaded with
+    the distribution's METADATA file."""
+
+    def __init__(self, path):
+        if os.path.isdir(path):
+            path = os.path.join(path, 'PKG-INFO')
+        self.metadata = DistributionMetadata(path=path)
+        self.name = self.metadata['name']
+
+    def get_installed_files(self, local=False):
+        return []
+
+    def uses(self, path):
+        return False
+
+
 def _normalize_dist_name(name):
     """Returns a normalized name from the given *name*.
     :rtype: string"""
     return name.replace('-', '_')
 
+
 def distinfo_dirname(name, version):
     """
     The *name* and *version* parameters are converted into their
     filename-escaped form, i.e. any ``'-'`` characters are replaced with ``'_'``
-    other than the one in ``'dist-info'`` and the one separating the name from 
+    other than the one in ``'dist-info'`` and the one separating the name from
     the version number.
 
     :parameter name: is converted to a standard distribution name by replacing
@@ -743,13 +774,16 @@
         normalized_version = version
     return '-'.join([name, normalized_version]) + file_extension
 
-def get_distributions():
+
+def get_distributions(use_egg_info=False):
     """
     Provides an iterator that looks for ``.dist-info`` directories in
     ``sys.path`` and returns :class:`Distribution` instances for each one of
-    them.
+them. If the parameter *use_egg_info* is ``True``, then the ``.egg-info``
+files and directories are iterated as well.
 
-    :rtype: iterator of :class:`Distribution` instances"""
+    :rtype: iterator of :class:`Distribution` and :class:`EggInfoDistribution`
+            instances"""
     for path in sys.path:
         realpath = os.path.realpath(path)
         if not os.path.isdir(realpath):
@@ -758,25 +792,117 @@
             if dir.endswith('.dist-info'):
                 dist = Distribution(os.path.join(realpath, dir))
                 yield dist
+            elif use_egg_info and dir.endswith('.egg-info'):
+                dist = EggInfoDistribution(os.path.join(realpath, dir))
+                yield dist
 
-def get_distribution(name):
+
+def get_distribution(name, use_egg_info=False):
     """
     Scans all elements in ``sys.path`` and looks for all directories ending with
-    ``.dist-info``. Returns a :class:`Distribution` corresponding to the 
+    ``.dist-info``. Returns a :class:`Distribution` corresponding to the
     ``.dist-info`` directory that contains the METADATA that matches *name* for
-    the *name* metadata.
+    the *name* metadata field.
+    If no distribution exists with the given *name* and the parameter
+    *use_egg_info* is set to ``True``, then all files and directories ending
+    with ``.egg-info`` are scanned. A :class:`EggInfoDistribution` instance is
+    returned if one is found that has metadata that matches *name* for the
+    *name* metadata field.
 
     This function only returns the first result found, as no more than one
     value is expected. If the directory is not found, ``None`` is returned.
 
-    :rtype: :class:`Distribution` or None"""
+    :rtype: :class:`Distribution` or :class:`EggInfoDistribution` or None"""
     found = None
     for dist in get_distributions():
         if dist.name == name:
             found = dist
             break
+    if use_egg_info:
+        for dist in get_distributions(True):
+            if dist.name == name:
+                found = dist
+                break
     return found
 
+
+def obsoletes_distribution(name, version=None, use_egg_info=False):
+    """
+    Iterates over all distributions to find which distributions obsolete *name*.
+    If a *version* is provided, it will be used to filter the results.
+    If the argument *use_egg_info* is set to ``True``, then ``.egg-info``
+    distributions will be considered as well.
+
+    :type name: string
+    :type version: string
+    :parameter name:
+    """
+    for dist in get_distributions(use_egg_info):
+        obsoleted = dist.metadata['Obsoletes-Dist'] + dist.metadata['Obsoletes']
+        for obs in obsoleted:
+            o_components = obs.split(' ', 1)
+            if len(o_components) == 1 or version is None:
+                if name == o_components[0]:
+                    yield dist
+                    break
+            else:
+                try:
+                    predicate = VersionPredicate(obs)
+                except ValueError:
+                    raise DistutilsError(('Distribution %s has ill formed' +
+                                          ' obsoletes field') % (dist.name,))
+                if name == o_components[0] and predicate.match(version):
+                    yield dist
+                    break
+
+
+def provides_distribution(name, version=None, use_egg_info=False):
+    """
+    Iterates over all distributions to find which distributions provide *name*.
+    If a *version* is provided, it will be used to filter the results. Scans
+    all elements in ``sys.path``  and looks for all directories ending with
+    ``.dist-info``. Returns a :class:`Distribution`  corresponding to the
+    ``.dist-info`` directory that contains a ``METADATA`` that matches *name*
+    for the name metadata. If the argument *use_egg_info* is set to ``True``,
+    then all files and directories ending with ``.egg-info`` are considered
+    as well and returns an :class:`EggInfoDistribution` instance.
+
+    This function only returns the first result found, since no more than
+    one value is expected. If the directory is not found, returns ``None``.
+
+    :parameter version: a version specifier that indicates the version
+                        required, conforming to the format in ``PEP-345``
+
+    :type name: string
+    :type version: string
+    """
+    predicate = None
+    if not version is None:
+        try:
+            predicate = VersionPredicate(name + ' (' + version + ')')
+        except ValueError:
+            raise DistutilsError('Invalid name or version')
+
+    for dist in get_distributions(use_egg_info):
+        provided = dist.metadata['Provides-Dist'] + dist.metadata['Provides']
+
+        for p in provided:
+            p_components = p.split(' ', 1)
+            if len(p_components) == 1 or predicate is None:
+                if name == p_components[0]:
+                    yield dist
+                    break
+            else:
+                p_name, p_ver = p_components
+                if len(p_ver) < 2 or p_ver[0] != '(' or p_ver[-1] != ')':
+                    raise DistutilsError(('Distribution %s has invalid ' +
+                                          'provides field') % (dist.name,))
+                p_ver = p_ver[1:-1] # trim off the parenthesis
+                if p_name == name and predicate.match(p_ver):
+                    yield dist
+                    break
+
+
 def get_file_users(path):
     """
     Iterates over all distributions to find out which distributions use
diff --git a/src/distutils2/_backport/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO b/src/distutils2/_backport/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO
new file mode 100644
--- /dev/null
+++ b/src/distutils2/_backport/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO
@@ -0,0 +1,5 @@
+Metadata-Version: 1.2
+Name: bacon
+Version: 0.1
+Provides-Dist: truffles (2.0)
+Obsoletes-Dist: truffles (>=0.9,<=1.5)
diff --git a/src/distutils2/_backport/tests/fake_dists/cheese-2.0.2.egg-info b/src/distutils2/_backport/tests/fake_dists/cheese-2.0.2.egg-info
new file mode 100644
--- /dev/null
+++ b/src/distutils2/_backport/tests/fake_dists/cheese-2.0.2.egg-info
@@ -0,0 +1,5 @@
+Metadata-Version: 1.2
+Name: cheese
+Version: 2.0.2
+Provides-Dist: truffles (1.0.2)
+Obsoletes-Dist: truffles (!=1.2,<=2.0)
diff --git a/src/distutils2/_backport/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA b/src/distutils2/_backport/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA
--- a/src/distutils2/_backport/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA
+++ b/src/distutils2/_backport/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA
@@ -4,3 +4,5 @@
 Summary: Chocolate with a kick!
 Requires-Dist: towel-stuff (0.1)
 Provides-Dist: truffles (1.0)
+Obsoletes-Dist: truffles (<=0.8,>=0.5)
+Obsoletes-Dist: truffles (<=0.9,>=0.6)
diff --git a/src/distutils2/_backport/tests/fake_dists/grammar-1.0a4.dist-info/METADATA b/src/distutils2/_backport/tests/fake_dists/grammar-1.0a4.dist-info/METADATA
--- a/src/distutils2/_backport/tests/fake_dists/grammar-1.0a4.dist-info/METADATA
+++ b/src/distutils2/_backport/tests/fake_dists/grammar-1.0a4.dist-info/METADATA
@@ -1,3 +1,4 @@
 Metadata-Version: 1.2
 Name: grammar
 Version: 1.0a4
+Requires-Dist: truffles (>=1.2)
diff --git a/src/distutils2/_backport/tests/fake_dists/towel_stuff-0.1.dist-info/METADATA b/src/distutils2/_backport/tests/fake_dists/towel_stuff-0.1.dist-info/METADATA
--- a/src/distutils2/_backport/tests/fake_dists/towel_stuff-0.1.dist-info/METADATA
+++ b/src/distutils2/_backport/tests/fake_dists/towel_stuff-0.1.dist-info/METADATA
@@ -1,3 +1,5 @@
 Metadata-Version: 1.2
 Name: towel-stuff
 Version: 0.1
+Provides-Dist: truffles (1.1.2)
+Obsoletes-Dist: truffles (!=0.8,<1.0)
diff --git a/src/distutils2/_backport/tests/test_pkgutil.py b/src/distutils2/_backport/tests/test_pkgutil.py
--- a/src/distutils2/_backport/tests/test_pkgutil.py
+++ b/src/distutils2/_backport/tests/test_pkgutil.py
@@ -216,7 +216,9 @@
         found_dists = []
 
         # Import the function in question
-        from distutils2._backport.pkgutil import get_distributions, Distribution
+        from distutils2._backport.pkgutil import get_distributions, \
+                                                 Distribution, \
+                                                 EggInfoDistribution
 
         # Verify the fake dists have been found.
         dists = [ dist for dist in get_distributions() ]
@@ -231,13 +233,31 @@
         # Finally, test that we found all that we were looking for
         self.assertListEqual(sorted(found_dists), sorted(fake_dists))
 
+        # Now, test if the egg-info distributions are found correctly as well
+        fake_dists += [('bacon', '0.1'), ('cheese', '2.0.2')]
+        found_dists = []
+
+        dists = [ dist for dist in get_distributions(use_egg_info=True) ]
+        for dist in dists:
+            if not (isinstance(dist, Distribution) or \
+                    isinstance(dist, EggInfoDistribution)):
+                self.fail("item received was not a Distribution or "
+                          "EggInfoDistribution instance: %s" % type(dist))
+            if dist.name in dict(fake_dists).keys():
+                found_dists.append((dist.name, dist.metadata['version']))
+
+        self.assertListEqual(sorted(fake_dists), sorted(found_dists))
+
+
     def test_get_distribution(self):
         """Test for looking up a distribution by name."""
         # Test the lookup of the towel-stuff distribution
         name = 'towel-stuff' # Note: This is different from the directory name
 
         # Import the function in question
-        from distutils2._backport.pkgutil import get_distribution, Distribution
+        from distutils2._backport.pkgutil import get_distribution, \
+                                                 Distribution, \
+                                                 EggInfoDistribution
 
         # Lookup the distribution
         dist = get_distribution(name)
@@ -250,6 +270,21 @@
         # Verify partial name matching doesn't work
         self.assertEqual(None, get_distribution('towel'))
 
+        # Verify that it does not find egg-info distributions, when not
+        # instructed to
+        self.assertEqual(None, get_distribution('bacon'))
+        self.assertEqual(None, get_distribution('cheese'))
+
+        # Now check that it works well in both situations, when egg-info
+        # is a file and directory respectively.
+        dist = get_distribution('cheese', use_egg_info=True)
+        self.assertTrue(isinstance(dist, EggInfoDistribution))
+        self.assertEqual(dist.name, 'cheese')
+
+        dist = get_distribution('bacon', use_egg_info=True)
+        self.assertTrue(isinstance(dist, EggInfoDistribution))
+        self.assertEqual(dist.name, 'bacon')
+
     def test_get_file_users(self):
         """Test the iteration of distributions that use a file."""
         from distutils2._backport.pkgutil import get_file_users, Distribution
@@ -260,6 +295,80 @@
             self.assertTrue(isinstance(dist, Distribution))
             self.assertEqual(dist.name, name)
 
+    def test_provides(self):
+        """ Test for looking up distributions by what they provide """
+        from distutils2._backport.pkgutil import provides_distribution
+        from distutils2.errors import DistutilsError
+
+        checkLists = lambda x,y: self.assertListEqual(sorted(x), sorted(y))
+
+        l = [dist.name for dist in provides_distribution('truffles')]
+        checkLists(l, ['choxie', 'towel-stuff'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '1.0')]
+        checkLists(l, ['choxie'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '1.0',
+                                                         use_egg_info=True)]
+        checkLists(l, ['choxie', 'cheese'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '1.1.2')]
+        checkLists(l, ['towel-stuff'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '1.1')]
+        checkLists(l, ['towel-stuff'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '!=1.1,<=2.0')]
+        checkLists(l, ['choxie'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '!=1.1,<=2.0',
+                                                          use_egg_info=True)]
+        checkLists(l, ['choxie', 'bacon', 'cheese'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '>1.0')]
+        checkLists(l, ['towel-stuff'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '>1.5')]
+        checkLists(l, [])
+
+        l = [dist.name for dist in provides_distribution('truffles', '>1.5',
+                                                         use_egg_info=True)]
+        checkLists(l, ['bacon'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '>=1.0')]
+        checkLists(l, ['choxie', 'towel-stuff'])
+
+    def test_obsoletes(self):
+        """ Test looking for distributions based on what they obsolete """
+        from distutils2._backport.pkgutil import obsoletes_distribution
+        from distutils2.errors import DistutilsError
+
+        checkLists = lambda x,y: self.assertListEqual(sorted(x), sorted(y))
+
+        l = [dist.name for dist in obsoletes_distribution('truffles', '1.0')]
+        checkLists(l, [])
+
+        l = [dist.name for dist in obsoletes_distribution('truffles', '1.0',
+                                                          use_egg_info=True)]
+        checkLists(l, ['cheese', 'bacon'])
+
+
+        l = [dist.name for dist in obsoletes_distribution('truffles', '0.8')]
+        checkLists(l, ['choxie'])
+
+        l = [dist.name for dist in obsoletes_distribution('truffles', '0.8',
+                                                          use_egg_info=True)]
+        checkLists(l, ['choxie', 'cheese'])
+
+        l = [dist.name for dist in obsoletes_distribution('truffles', '0.9.6')]
+        checkLists(l, ['choxie', 'towel-stuff'])
+
+        l = [dist.name for dist in obsoletes_distribution('truffles', '0.5.2.3')]
+        checkLists(l, ['choxie', 'towel-stuff'])
+
+        l = [dist.name for dist in obsoletes_distribution('truffles', '0.2')]
+        checkLists(l, ['towel-stuff'])
+
 
 def test_suite():
     suite = unittest2.TestSuite()
@@ -273,3 +382,16 @@
 
 if __name__ == "__main__":
     test_main()
+
+def test_suite():
+    suite = unittest2.TestSuite()
+    testcase_loader = unittest2.loader.defaultTestLoader.loadTestsFromTestCase
+    suite.addTest(testcase_loader(TestPkgUtilFunctions))
+    suite.addTest(testcase_loader(TestPkgUtilDistribution))
+    return suite
+
+def test_main():
+    run_unittest(test_suite())
+
+if __name__ == "__main__":
+    test_main()
diff --git a/src/distutils2/command/__init__.py b/src/distutils2/command/__init__.py
--- a/src/distutils2/command/__init__.py
+++ b/src/distutils2/command/__init__.py
@@ -20,7 +20,6 @@
            'register',
            'bdist',
            'bdist_dumb',
-           'bdist_rpm',
            'bdist_wininst',
            'upload',
            'check',
diff --git a/src/distutils2/command/bdist.py b/src/distutils2/command/bdist.py
--- a/src/distutils2/command/bdist.py
+++ b/src/distutils2/command/bdist.py
@@ -55,9 +55,6 @@
          "lists available distribution formats", show_formats),
         ]
 
-    # The following commands do not take a format option from bdist
-    no_format_option = ('bdist_rpm',)
-
     # This won't do in reality: will need to distinguish RPM-ish Linux,
     # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS.
     default_format = {'posix': 'gztar',
@@ -69,8 +66,7 @@
                        'wininst', 'zip', 'msi']
 
     # And the real information.
-    format_command = {'rpm':   ('bdist_rpm',  "RPM distribution"),
-                      'gztar': ('bdist_dumb', "gzip'ed tar file"),
+    format_command = {'gztar': ('bdist_dumb', "gzip'ed tar file"),
                       'bztar': ('bdist_dumb', "bzip2'ed tar file"),
                       'ztar':  ('bdist_dumb', "compressed tar file"),
                       'tar':   ('bdist_dumb', "tar file"),
@@ -131,8 +127,6 @@
         for i in range(len(self.formats)):
             cmd_name = commands[i]
             sub_cmd = self.reinitialize_command(cmd_name)
-            if cmd_name not in self.no_format_option:
-                sub_cmd.format = self.formats[i]
 
             # passing the owner and group names for tar archiving
             if cmd_name == 'bdist_dumb':
diff --git a/src/distutils2/command/sdist.py b/src/distutils2/command/sdist.py
--- a/src/distutils2/command/sdist.py
+++ b/src/distutils2/command/sdist.py
@@ -10,6 +10,8 @@
 from glob import glob
 from warnings import warn
 from shutil import rmtree
+import re
+
 try:
     from shutil import get_archive_formats
 except ImportError:
@@ -17,10 +19,9 @@
 
 from distutils2.core import Command
 from distutils2 import util
-from distutils2.text_file import TextFile
 from distutils2.errors import (DistutilsPlatformError, DistutilsOptionError,
                               DistutilsTemplateError)
-from distutils2.filelist import FileList
+from distutils2.manifest import Manifest
 from distutils2 import log
 from distutils2.util import convert_path, newer
 
@@ -36,6 +37,10 @@
     FancyGetopt(formats).print_help(
         "List of available source distribution formats:")
 
+# a \ followed by some spaces + EOL
+_COLLAPSE_PATTERN = re.compile('\\\w\n', re.M)
+_COMMENTED_LINE = re.compile('^#.*\n$|^\w*\n$', re.M)
+
 class sdist(Command):
 
     description = "create a source distribution (tarball, zip file, etc.)"
@@ -63,10 +68,7 @@
         ('no-prune', None,
          "don't automatically exclude anything"),
         ('manifest-only', 'o',
-         "just regenerate the manifest and then stop "
-         "(implies --force-manifest)"),
-        ('force-manifest', 'f',
-         "forcibly regenerate the manifest and carry on as usual"),
+         "just regenerate the manifest and then stop "),
         ('formats=', None,
          "formats for source distribution (comma-separated list)"),
         ('keep-temp', 'k',
@@ -85,8 +87,7 @@
         ]
 
     boolean_options = ['use-defaults', 'prune',
-                       'manifest-only', 'force-manifest',
-                       'keep-temp', 'metadata-check']
+                       'manifest-only', 'keep-temp', 'metadata-check']
 
     help_options = [
         ('help-formats', None,
@@ -111,10 +112,7 @@
         # in the manifest
         self.use_defaults = 1
         self.prune = 1
-
         self.manifest_only = 0
-        self.force_manifest = 0
-
         self.formats = None
         self.keep_temp = 0
         self.dist_dir = None
@@ -123,6 +121,7 @@
         self.metadata_check = 1
         self.owner = None
         self.group = None
+        self.filelist = None
 
     def _check_archive_formats(self, formats):
         supported_formats = [name for name, desc in get_archive_formats()]
@@ -154,10 +153,14 @@
         if self.dist_dir is None:
             self.dist_dir = "dist"
 
+        if self.filelist is None:
+            self.filelist = Manifest()
+
+
     def run(self):
         # 'filelist' contains the list of files that will make up the
         # manifest
-        self.filelist = FileList()
+        self.filelist.clear()
 
         # Run sub commands
         for cmd_name in self.get_sub_commands():
@@ -189,64 +192,24 @@
         distribution, and put it in 'self.filelist'.  This might involve
         reading the manifest template (and writing the manifest), or just
         reading the manifest, or just using the default file set -- it all
-        depends on the user's options and the state of the filesystem.
+        depends on the user's options.
         """
-        # If we have a manifest template, see if it's newer than the
-        # manifest; if so, we'll regenerate the manifest.
         template_exists = os.path.isfile(self.template)
+        if not template_exists:
+            self.warn(("manifest template '%s' does not exist " +
+                        "(using default file list)") %
+                        self.template)
+
+        self.filelist.findall()
+
+        if self.use_defaults:
+            self.add_defaults()
         if template_exists:
-            template_newer = newer(self.template, self.manifest)
+            self.filelist.read_template(self.template)
+        if self.prune:
+            self.prune_file_list()
 
-        # The contents of the manifest file almost certainly depend on the
-        # setup script as well as the manifest template -- so if the setup
-        # script is newer than the manifest, we'll regenerate the manifest
-        # from the template.  (Well, not quite: if we already have a
-        # manifest, but there's no template -- which will happen if the
-        # developer elects to generate a manifest some other way -- then we
-        # can't regenerate the manifest, so we don't.)
-        setup_newer = newer(self.distribution.script_name,
-                            self.manifest)
-
-        # cases:
-        #   1) no manifest, template exists: generate manifest
-        #      (covered by 2a: no manifest == template newer)
-        #   2) manifest & template exist:
-        #      2a) template or setup script newer than manifest:
-        #          regenerate manifest
-        #      2b) manifest newer than both:
-        #          do nothing (unless --force or --manifest-only)
-        #   3) manifest exists, no template:
-        #      do nothing (unless --force or --manifest-only)
-        #   4) no manifest, no template: generate w/ warning ("defaults only")
-
-        manifest_outofdate = (template_exists and
-                              (template_newer or setup_newer))
-        force_regen = self.force_manifest or self.manifest_only
-        manifest_exists = os.path.isfile(self.manifest)
-        neither_exists = (not template_exists and not manifest_exists)
-
-        # Regenerate the manifest if necessary (or if explicitly told to)
-        if manifest_outofdate or neither_exists or force_regen:
-            if not template_exists:
-                self.warn(("manifest template '%s' does not exist " +
-                           "(using default file list)") %
-                          self.template)
-            self.filelist.findall()
-
-            if self.use_defaults:
-                self.add_defaults()
-            if template_exists:
-                self.read_template()
-            if self.prune:
-                self.prune_file_list()
-
-            self.filelist.sort()
-            self.filelist.remove_duplicates()
-            self.write_manifest()
-
-        # Don't regenerate the manifest, just read it in.
-        else:
-            self.read_manifest()
+        self.filelist.write(self.manifest)
 
     def add_defaults(self):
         """Add all the default files to self.filelist:
@@ -330,32 +293,6 @@
             build_scripts = self.get_finalized_command('build_scripts')
             self.filelist.extend(build_scripts.get_source_files())
 
-    def read_template(self):
-        """Read and parse manifest template file named by self.template.
-
-        (usually "MANIFEST.in") The parsing and processing is done by
-        'self.filelist', which updates itself accordingly.
-        """
-        log.info("reading manifest template '%s'", self.template)
-        template = TextFile(self.template,
-                            strip_comments=1,
-                            skip_blanks=1,
-                            join_lines=1,
-                            lstrip_ws=1,
-                            rstrip_ws=1,
-                            collapse_join=1)
-
-        while 1:
-            line = template.readline()
-            if line is None:            # end of file
-                break
-
-            try:
-                self.filelist.process_template_line(line)
-            except DistutilsTemplateError, msg:
-                self.warn("%s, line %d: %s" % (template.filename,
-                                               template.current_line,
-                                               msg))
 
     def prune_file_list(self):
         """Prune off branches that might slip into the file list as created
@@ -383,30 +320,6 @@
         vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps)
         self.filelist.exclude_pattern(vcs_ptrn, is_regex=1)
 
-    def write_manifest(self):
-        """Write the file list in 'self.filelist' (presumably as filled in
-        by 'add_defaults()' and 'read_template()') to the manifest file
-        named by 'self.manifest'.
-        """
-        self.execute(util.write_file,
-                     (self.manifest, self.filelist.files),
-                     "writing manifest file '%s'" % self.manifest)
-
-    def read_manifest(self):
-        """Read the manifest file (named by 'self.manifest') and use it to
-        fill in 'self.filelist', the list of files to include in the source
-        distribution.
-        """
-        log.info("reading manifest file '%s'", self.manifest)
-        manifest = open(self.manifest)
-        while 1:
-            line = manifest.readline()
-            if line == '':              # end of file
-                break
-            if line[-1] == '\n':
-                line = line[0:-1]
-            self.filelist.append(line)
-        manifest.close()
 
     def make_release_tree(self, base_dir, files):
         """Create the directory tree that will become the source
diff --git a/src/distutils2/command/upload.py b/src/distutils2/command/upload.py
--- a/src/distutils2/command/upload.py
+++ b/src/distutils2/command/upload.py
@@ -96,11 +96,7 @@
         data['md5_digest'] = md5(content).hexdigest()
 
         comment = ''
-        if command == 'bdist_rpm':
-            dist, version, id = platform.dist()
-            if dist:
-                comment = 'built for %s %s' % (dist, version)
-        elif command == 'bdist_dumb':
+        if command == 'bdist_dumb':
             comment = 'built for %s' % platform.platform(terse=1)
         data['comment'] = comment
 
diff --git a/src/distutils2/converter/__init__.py b/src/distutils2/converter/__init__.py
new file mode 100644
--- /dev/null
+++ b/src/distutils2/converter/__init__.py
@@ -0,0 +1,8 @@
+"""distutils2.converter
+
+This package provides a refactoring tool to transform a
+setuptools or distutils project into a distutils2 one.
+"""
+
+from distutils2.converter.refactor import DistutilsRefactoringTool
+
diff --git a/src/distutils2/converter/fixers/__init__.py b/src/distutils2/converter/fixers/__init__.py
new file mode 100644
--- /dev/null
+++ b/src/distutils2/converter/fixers/__init__.py
@@ -0,0 +1,4 @@
+"""distutils2.converter.fixers
+
+Contains all fixers for the converter.
+"""
diff --git a/src/distutils2/converter/fixers/fix_imports.py b/src/distutils2/converter/fixers/fix_imports.py
new file mode 100644
--- /dev/null
+++ b/src/distutils2/converter/fixers/fix_imports.py
@@ -0,0 +1,47 @@
+"""distutils2.converter.fixers.fix_imports
+
+Fixer for import statements in setup.py
+"""
+from lib2to3.fixer_base import BaseFix
+from lib2to3.fixer_util import syms
+
+
+class FixImports(BaseFix):
+    """Makes sure all import in setup.py are translated"""
+
+    PATTERN = """
+    import_from< 'from' imp=any 'import' ['('] any [')'] >
+    |
+    import_name< 'import' imp=any >
+    """
+
+    def transform(self, node, results):
+        imp = results['imp']
+        if node.type != syms.import_from:
+            return
+
+        if not hasattr(imp, "next_sibling"):
+            imp.next_sibling = imp.get_next_sibling()
+
+        while not hasattr(imp, 'value'):
+            imp = imp.children[0]
+
+        if imp.value == 'distutils':
+            imp.value = 'distutils2'
+            imp.changed()
+            return node
+
+        if imp.value == 'setuptools':
+            # catching "from setuptools import setup"
+            pattern = []
+            next = imp.next_sibling
+            while next is not None:
+                pattern.append(next.value)
+                if not hasattr(next, "next_sibling"):
+                    next.next_sibling = next.get_next_sibling()
+                next = next.next_sibling
+            if pattern == ['import', 'setup']:
+                imp.value = 'distutils2.core'
+                imp.changed()
+
+            return node
diff --git a/src/distutils2/converter/fixers/fix_setup_options.py b/src/distutils2/converter/fixers/fix_setup_options.py
new file mode 100644
--- /dev/null
+++ b/src/distutils2/converter/fixers/fix_setup_options.py
@@ -0,0 +1,85 @@
+"""Fixer for setup() options.
+
+All distutils or setuptools options are translated
+into PEP 345-style options.
+"""
+from lib2to3.pytree import Leaf, Node
+from lib2to3.pgen2 import token
+from lib2to3.fixer_base import BaseFix
+
+# XXX where is that defined ?
+_ARG = 260
+
+# name mapping : we want to convert
+# all old-style options to distutils2 style
+_OLD_NAMES = {'url': 'home_page',
+              'long_description': 'description',
+              'description': 'summary',
+              'install_requires': 'requires_dist'}
+
+_SEQUENCE_NAMES = ['requires_dist']
+
+
+class FixSetupOptions(BaseFix):
+
+    # XXX need to find something better here :
+    # identify a setup call, whatever alias is used
+    PATTERN = """
+            power< name='setup' trailer< '(' [any] ')' > any* >
+              """
+
+    def _get_list(self, *nodes):
+        """A List node, filled"""
+        lbrace = Leaf(token.LBRACE, u"[")
+        lbrace.prefix = u" "
+        if len(nodes) > 0:
+            nodes[0].prefix = u""
+        return Node(self.syms.trailer,
+                    [lbrace] +
+                    [node.clone() for node in nodes] +
+                    [Leaf(token.RBRACE, u"]")])
+
+    def _fix_name(self, argument, remove_list):
+        name = argument.children[0]
+
+        if not hasattr(name, "next_sibling"):
+            name.next_sibling = name.get_next_sibling()
+
+        sibling = name.next_sibling
+        if sibling is None or sibling.type != token.EQUAL:
+            return False
+
+        if name.value in _OLD_NAMES:
+            name.value = _OLD_NAMES[name.value]
+            if name.value in _SEQUENCE_NAMES:
+                if not hasattr(sibling, "next_sibling"):
+                    sibling.next_sibling = sibling.get_next_sibling()
+                right_operand = sibling.next_sibling
+                # replacing string -> list[string]
+                if right_operand.type == token.STRING:
+                    # we want this to be a list now
+                    new_node = self._get_list(right_operand)
+                    right_operand.replace(new_node)
+
+
+            return True
+
+        return False
+
+    def transform(self, node, results):
+        arglist = node.children[1].children[1]
+        remove_list = []
+        changed = False
+
+        for subnode in arglist.children:
+            if subnode.type != _ARG:
+                continue
+            if self._fix_name(subnode, remove_list) and not changed:
+                changed = True
+
+        for subnode in remove_list:
+            subnode.remove()
+
+        if changed:
+            node.changed()
+        return node
diff --git a/src/distutils2/converter/refactor.py b/src/distutils2/converter/refactor.py
new file mode 100644
--- /dev/null
+++ b/src/distutils2/converter/refactor.py
@@ -0,0 +1,28 @@
+"""distutils2.converter.refactor
+
+Provides DistutilsRefactoringTool, a class that registers fixers used
+to refactor distutils or setuptools packages into distutils2 ones.
+"""
+try:
+    from lib2to3.refactor import RefactoringTool
+    _LIB2TO3 = True
+except ImportError:
+    # we need 2.6 at least to run this
+    _LIB2TO3 = False
+
+_DISTUTILS_FIXERS = ['distutils2.converter.fixers.fix_imports',
+                     'distutils2.converter.fixers.fix_setup_options']
+
+if _LIB2TO3:
+    class DistutilsRefactoringTool(RefactoringTool):
+
+        def __init__(self, fixer_names=_DISTUTILS_FIXERS, options=None,
+                    explicit=None):
+
+            super(DistutilsRefactoringTool, self).__init__(fixer_names, options,
+                                                            explicit)
+else:
+    class DistutilsRefactoringTool(object):
+        def __init__(self, *args, **kw):
+            raise NotImplementedError('Not available if run from Python < 2.6')
+
diff --git a/src/distutils2/depgraph.py b/src/distutils2/depgraph.py
new file mode 100644
--- /dev/null
+++ b/src/distutils2/depgraph.py
@@ -0,0 +1,181 @@
+"""
+A dependency graph generator. The graph is represented as an instance of
+:class:`DependencyGraph`, and DOT output is possible as well.
+"""
+
+from distutils2._backport import pkgutil
+from distutils2.errors import DistutilsError
+from distutils2.version import VersionPredicate
+
+__all__ = ['DependencyGraph', 'generate_graph']
+
+
+class DependencyGraph(object):
+    """
+    Represents a dependency graph between distributions.
+
+    The dependency relationships are stored in an *adjacency_list* that maps
+    distributions to a list of ``(other, label)`` tuples where ``other``
+    is a distribution and the edge is labelled with ``label`` (i.e. the version
+    specifier, if such was provided). If any missing dependencies are found,
+    they are stored in ``missing``. It maps distributions to a list of
+    requirements that were not provided by any other distributions.
+    """
+
+    def __init__(self):
+        self.adjacency_list = {}
+        self.missing = {}
+
+    def add_distribution(self, distribution):
+        """
+        Add *distribution* to the graph.
+
+        :type distribution: :class:`pkgutil.Distribution` or
+                            :class:`pkgutil.EggInfoDistribution`
+        """
+        self.adjacency_list[distribution] = list()
+        self.missing[distribution] = list()
+
+    def add_edge(self, x, y, label=None):
+        """
+        Add an edge from distribution *x* to distribution *y* with the given
+        *label*.
+
+
+        :type x: :class:`pkgutil.Distribution` or
+                 :class:`pkgutil.EggInfoDistribution`
+        :type y: :class:`pkgutil.Distribution` or
+                 :class:`pkgutil.EggInfoDistribution`
+        :type label: ``str`` or ``None``
+        """
+        self.adjacency_list[x].append((y, label))
+
+    def add_missing(self, distribution, requirement):
+        """
+        Add a missing *requirement* for the given *distribution*.
+
+        :type distribution: :class:`pkgutil.Distribution` or
+                            :class:`pkgutil.EggInfoDistribution`
+        :type requirement: ``str``
+        """
+        self.missing[distribution].append(requirement)
+
+    def to_dot(self, f, skip_disconnected=True):
+        """
+        Writes a DOT output for the graph to the provided file *f*.
+        If *skip_disconnected* is set to ``True``, then all distributions
+        that are not dependent on any other distributions are skipped.
+
+        :type f: ``file``
+        :type skip_disconnected: ``bool``
+        """
+        if not isinstance(f, file):
+            raise TypeError('the argument has to be of type file')
+
+        disconnected = []
+
+        f.write("digraph dependencies {\n")
+        for dist, adjs in self.adjacency_list.iteritems():
+            if len(adjs) == 0 and not skip_disconnected:
+                disconnected.append(dist)
+            for (other, label) in adjs:
+                if not label is None:
+                    f.write('"%s" -> "%s" [label="%s"]\n' %
+                                                (dist.name, other.name, label))
+                else:
+                    f.write('"%s" -> "%s"\n' % (dist.name, other.name))
+        if not skip_disconnected and len(disconnected) > 0:
+            f.write('subgraph disconnected {\n')
+            f.write('label = "Disconnected"\n')
+            f.write('bgcolor = red\n')
+
+            for dist in disconnected:
+                f.write('"%s"' % dist.name)
+                f.write('\n')
+            f.write('}\n')
+        f.write('}\n')
+
+
+def generate_graph(dists):
+    """
+    Generates a dependency graph from the given distributions.
+
+    :parameter dists: a list of distributions
+    :type dists: list of :class:`pkgutil.Distribution` and
+                         :class:`pkgutil.EggInfoDistribution` instances
+    :rtype: an :class:`DependencyGraph` instance
+    """
+    graph = DependencyGraph()
+    provided = {} # maps names to lists of (version, dist) tuples
+    dists = list(dists) # maybe use generator_tools in future
+
+    # first, build the graph and find out the provides
+    for dist in dists:
+        graph.add_distribution(dist)
+        provides = dist.metadata['Provides-Dist'] + dist.metadata['Provides']
+
+        for p in provides:
+            comps = p.split(" ", 1)
+            name = comps[0]
+            version = None
+            if len(comps) == 2:
+                version = comps[1]
+                if len(version) < 3 or version[0] != '(' or version[-1] != ')':
+                    raise DistutilsError('Distribution %s has ill formed' \
+                                         'provides field: %s' % (dist.name, p))
+                version = version[1:-1] # trim off parenthesis
+            if not name in provided:
+                provided[name] = []
+            provided[name].append((version, dist))
+
+    # now make the edges
+    for dist in dists:
+        requires = dist.metadata['Requires-Dist'] + dist.metadata['Requires']
+        for req in requires:
+            predicate = VersionPredicate(req)
+            comps = req.split(" ", 1)
+            name = comps[0]
+
+            if not name in provided:
+                graph.add_missing(dist, req)
+            else:
+                for (version, provider) in provided[name]:
+                    if predicate.match(version):
+                        graph.add_edge(dist, provider, req)
+
+    return graph
+
+
+def dependent_dists(dists, dist):
+    """
+    Recursively generate a list of distributions from *dists* that are
+    dependent on *dist*.
+
+    :param dists: a list of distributions
+    :param dist: a distribution, member of *dists* for which we are interested
+    """
+    if not dist in dists:
+        raise ValueError('The given distribution is not a member of the list')
+    graph = generate_graph(dists)
+
+    dep = [dist]
+    fringe = [dist] # list of nodes we should expand
+    while not len(fringe) == 0:
+        next = graph.adjacency_list[fringe.pop()]
+        for (dist, label) in next:
+            if not dist in dep: # avoid infinite loops
+                dep.append(dist)
+                fringe.append(dist)
+
+    dep.pop()
+    return dep
+
+if __name__ == '__main__':
+    dists = list(pkgutil.get_distributions(use_egg_info=True))
+    graph = generate_graph(dists)
+    for dist, reqs in graph.missing.iteritems():
+        if len(reqs) > 0:
+            print("Missing dependencies for %s: %s" % (dist.name,
+                                                       ", ".join(reqs)))
+    f = open('output.dot', 'w')
+    graph.to_dot(f, True)
diff --git a/src/distutils2/filelist.py b/src/distutils2/filelist.py
deleted file mode 100644
--- a/src/distutils2/filelist.py
+++ /dev/null
@@ -1,306 +0,0 @@
-"""distutils.filelist
-
-Provides the FileList class, used for poking about the filesystem
-and building lists of files.
-"""
-
-__revision__ = "$Id: filelist.py 75196 2009-10-03 00:07:35Z tarek.ziade $"
-
-import os, re
-import fnmatch
-from distutils2.util import convert_path
-from distutils2.errors import DistutilsTemplateError, DistutilsInternalError
-from distutils2 import log
-
-class FileList(object):
-    """A list of files built by on exploring the filesystem and filtered by
-    applying various patterns to what we find there.
-
-    Instance attributes:
-      dir
-        directory from which files will be taken -- only used if
-        'allfiles' not supplied to constructor
-      files
-        list of filenames currently being built/filtered/manipulated
-      allfiles
-        complete list of files under consideration (ie. without any
-        filtering applied)
-    """
-
-    def __init__(self):
-        self.allfiles = None
-        self.files = []
-
-    def findall(self, dir=os.curdir):
-        self.allfiles = findall(dir)
-
-    # -- List-like methods ---------------------------------------------
-
-    def append(self, item):
-        self.files.append(item)
-
-    def extend(self, items):
-        self.files.extend(items)
-
-    def sort(self):
-        # Not a strict lexical sort!
-        sortable_files = map(os.path.split, self.files)
-        sortable_files.sort()
-        self.files = []
-        for sort_tuple in sortable_files:
-            self.files.append(os.path.join(*sort_tuple))
-
-
-    # -- Other miscellaneous utility methods ---------------------------
-
-    def remove_duplicates(self):
-        # Assumes list has been sorted!
-        for i in range(len(self.files) - 1, 0, -1):
-            if self.files[i] == self.files[i - 1]:
-                del self.files[i]
-
-
-    # -- "File template" methods ---------------------------------------
-
-    def _parse_template_line(self, line):
-        words = line.split()
-        action = words[0]
-
-        patterns = dir = dir_pattern = None
-
-        if action in ('include', 'exclude',
-                      'global-include', 'global-exclude'):
-            if len(words) < 2:
-                raise DistutilsTemplateError, \
-                      "'%s' expects <pattern1> <pattern2> ..." % action
-
-            patterns = map(convert_path, words[1:])
-
-        elif action in ('recursive-include', 'recursive-exclude'):
-            if len(words) < 3:
-                raise DistutilsTemplateError, \
-                      "'%s' expects <dir> <pattern1> <pattern2> ..." % action
-
-            dir = convert_path(words[1])
-            patterns = map(convert_path, words[2:])
-
-        elif action in ('graft', 'prune'):
-            if len(words) != 2:
-                raise DistutilsTemplateError, \
-                     "'%s' expects a single <dir_pattern>" % action
-
-            dir_pattern = convert_path(words[1])
-
-        else:
-            raise DistutilsTemplateError, "unknown action '%s'" % action
-
-        return (action, patterns, dir, dir_pattern)
-
-    def process_template_line(self, line):
-        # Parse the line: split it up, make sure the right number of words
-        # is there, and return the relevant words.  'action' is always
-        # defined: it's the first word of the line.  Which of the other
-        # three are defined depends on the action; it'll be either
-        # patterns, (dir and patterns), or (dir_pattern).
-        action, patterns, dir, dir_pattern = self._parse_template_line(line)
-
-        # OK, now we know that the action is valid and we have the
-        # right number of words on the line for that action -- so we
-        # can proceed with minimal error-checking.
-        if action == 'include':
-            for pattern in patterns:
-                if not self.include_pattern(pattern, anchor=1):
-                    log.warn("warning: no files found matching '%s'",
-                             pattern)
-
-        elif action == 'exclude':
-            for pattern in patterns:
-                if not self.exclude_pattern(pattern, anchor=1):
-                    log.warn(("warning: no previously-included files "
-                              "found matching '%s'"), pattern)
-
-        elif action == 'global-include':
-            for pattern in patterns:
-                if not self.include_pattern(pattern, anchor=0):
-                    log.warn(("warning: no files found matching '%s' " +
-                              "anywhere in distribution"), pattern)
-
-        elif action == 'global-exclude':
-            for pattern in patterns:
-                if not self.exclude_pattern(pattern, anchor=0):
-                    log.warn(("warning: no previously-included files matching "
-                              "'%s' found anywhere in distribution"),
-                             pattern)
-
-        elif action == 'recursive-include':
-            for pattern in patterns:
-                if not self.include_pattern(pattern, prefix=dir):
-                    log.warn(("warning: no files found matching '%s' " +
-                                "under directory '%s'"),
-                             pattern, dir)
-
-        elif action == 'recursive-exclude':
-            for pattern in patterns:
-                if not self.exclude_pattern(pattern, prefix=dir):
-                    log.warn(("warning: no previously-included files matching "
-                              "'%s' found under directory '%s'"),
-                             pattern, dir)
-
-        elif action == 'graft':
-            if not self.include_pattern(None, prefix=dir_pattern):
-                log.warn("warning: no directories found matching '%s'",
-                         dir_pattern)
-
-        elif action == 'prune':
-            if not self.exclude_pattern(None, prefix=dir_pattern):
-                log.warn(("no previously-included directories found " +
-                          "matching '%s'"), dir_pattern)
-        else:
-            raise DistutilsInternalError, \
-                  "this cannot happen: invalid action '%s'" % action
-
-    # -- Filtering/selection methods -----------------------------------
-
-    def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
-        """Select strings (presumably filenames) from 'self.files' that
-        match 'pattern', a Unix-style wildcard (glob) pattern.
-
-        Patterns are not quite the same as implemented by the 'fnmatch'
-        module: '*' and '?'  match non-special characters, where "special"
-        is platform-dependent: slash on Unix; colon, slash, and backslash on
-        DOS/Windows; and colon on Mac OS.
-
-        If 'anchor' is true (the default), then the pattern match is more
-        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
-        'anchor' is false, both of these will match.
-
-        If 'prefix' is supplied, then only filenames starting with 'prefix'
-        (itself a pattern) and ending with 'pattern', with anything in between
-        them, will match.  'anchor' is ignored in this case.
-
-        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
-        'pattern' is assumed to be either a string containing a regex or a
-        regex object -- no translation is done, the regex is just compiled
-        and used as-is.
-
-        Selected strings will be added to self.files.
-
-        Return 1 if files are found.
-        """
-        files_found = 0
-        pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
-        # delayed loading of allfiles list
-        if self.allfiles is None:
-            self.findall()
-
-        for name in self.allfiles:
-            if pattern_re.search(name):
-                self.files.append(name)
-                files_found = 1
-
-        return files_found
-
-
-    def exclude_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
-        """Remove strings (presumably filenames) from 'files' that match
-        'pattern'.
-
-        Other parameters are the same as for 'include_pattern()', above.
-        The list 'self.files' is modified in place. Return 1 if files are
-        found.
-        """
-        files_found = 0
-        pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
-        for i in range(len(self.files)-1, -1, -1):
-            if pattern_re.search(self.files[i]):
-                del self.files[i]
-                files_found = 1
-
-        return files_found
-
-
-# ----------------------------------------------------------------------
-# Utility functions
-
-def findall(dir=os.curdir):
-    """Find all files under 'dir' and return the list of full filenames
-    (relative to 'dir').
-    """
-    from stat import ST_MODE, S_ISREG, S_ISDIR, S_ISLNK
-
-    list = []
-    stack = [dir]
-    pop = stack.pop
-    push = stack.append
-
-    while stack:
-        dir = pop()
-        names = os.listdir(dir)
-
-        for name in names:
-            if dir != os.curdir:        # avoid the dreaded "./" syndrome
-                fullname = os.path.join(dir, name)
-            else:
-                fullname = name
-
-            # Avoid excess stat calls -- just one will do, thank you!
-            stat = os.stat(fullname)
-            mode = stat[ST_MODE]
-            if S_ISREG(mode):
-                list.append(fullname)
-            elif S_ISDIR(mode) and not S_ISLNK(mode):
-                push(fullname)
-
-    return list
-
-
-def glob_to_re(pattern):
-    """Translate a shell-like glob pattern to a regular expression.
-
-    Return a string containing the regex.  Differs from
-    'fnmatch.translate()' in that '*' does not match "special characters"
-    (which are platform-specific).
-    """
-    pattern_re = fnmatch.translate(pattern)
-
-    # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
-    # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
-    # and by extension they shouldn't match such "special characters" under
-    # any OS.  So change all non-escaped dots in the RE to match any
-    # character except the special characters.
-    # XXX currently the "special characters" are just slash -- i.e. this is
-    # Unix-only.
-    pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', r'\1[^/]', pattern_re)
-
-    return pattern_re
-
-
-def translate_pattern(pattern, anchor=1, prefix=None, is_regex=0):
-    """Translate a shell-like wildcard pattern to a compiled regular
-    expression.
-
-    Return the compiled regex.  If 'is_regex' true,
-    then 'pattern' is directly compiled to a regex (if it's a string)
-    or just returned as-is (assumes it's a regex object).
-    """
-    if is_regex:
-        if isinstance(pattern, str):
-            return re.compile(pattern)
-        else:
-            return pattern
-
-    if pattern:
-        pattern_re = glob_to_re(pattern)
-    else:
-        pattern_re = ''
-
-    if prefix is not None:
-        # ditch end of pattern character
-        empty_pattern = glob_to_re('')
-        prefix_re = glob_to_re(prefix)[:-len(empty_pattern)]
-        pattern_re = "^" + os.path.join(prefix_re, ".*" + pattern_re)
-    else:                               # no prefix -- respect anchor flag
-        if anchor:
-            pattern_re = "^" + pattern_re
-
-    return re.compile(pattern_re)
diff --git a/src/distutils2/manifest.py b/src/distutils2/manifest.py
new file mode 100644
--- /dev/null
+++ b/src/distutils2/manifest.py
@@ -0,0 +1,361 @@
+"""distutils2.manifest
+
+Provides a Manifest class that can be used to:
+
+ - read or write a MANIFEST file
+ - read a template file and find out the file list
+
+Basically, Manifest *is* the file list.
+
+XXX todo: document + add tests
+"""
+import re
+import os
+import fnmatch
+import logging
+
+from distutils2.util import write_file, convert_path
+from distutils2.errors import (DistutilsTemplateError,
+                               DistutilsInternalError)
+
+__all__ = ['Manifest']
+
# A backslash, optional trailing spaces/tabs, then EOL: a line
# continuation in a manifest template.  The original pattern used '\w*'
# (word characters), which contradicted this intent and failed to
# collapse a continuation with trailing whitespace after the backslash.
_COLLAPSE_PATTERN = re.compile(r'\\[ \t]*\n', re.M)
# Matches a comment up to (but not including) its newline, a line made
# only of word characters (including an empty line), or the final
# newline of the content -- all stripped when cleaning template text.
_COMMENTED_LINE = re.compile('#.*?(?=\n)|^\w*\n|\n(?=$)', re.M|re.S)
+
+class Manifest(object):
+    """A list of files built by on exploring the filesystem and filtered by
+    applying various patterns to what we find there.
+    """
+
+    def __init__(self):
+        self.allfiles = None
+        self.files = []
+
+    #
+    # Public API
+    #
+
+    def findall(self, dir=os.curdir):
+        self.allfiles = _findall(dir)
+
+    def append(self, item):
+        self.files.append(item)
+
+    def extend(self, items):
+        self.files.extend(items)
+
+    def sort(self):
+        # Not a strict lexical sort!
+        sortable_files = map(os.path.split, self.files)
+        sortable_files.sort()
+        self.files = []
+        for sort_tuple in sortable_files:
+            self.files.append(os.path.join(*sort_tuple))
+
+    def clear(self):
+        """Clear all collected files."""
+        self.files = []
+        if self.allfiles is not None:
+            self.allfiles = []
+
+    def remove_duplicates(self):
+        # Assumes list has been sorted!
+        for i in range(len(self.files) - 1, 0, -1):
+            if self.files[i] == self.files[i - 1]:
+                del self.files[i]
+
+    def read_template(self, path):
+        """Read and parse a manifest template file.
+
+        Updates the list accordingly.
+        """
+        f = open(path)
+        try:
+            content = f.read()
+            # first, let's unwrap collapsed lines
+            content = _COLLAPSE_PATTERN.sub('', content)
+
+            # next, let's remove commented lines and empty lines
+            content = _COMMENTED_LINE.sub('', content)
+
+            # now we have our cleaned up lines
+            lines = [line.strip() for line in content.split('\n')]
+        finally:
+            f.close()
+
+        for line in lines:
+            try:
+                self._process_template_line(line)
+            except DistutilsTemplateError, msg:
+                logging.warning("%s, %s" % (path, msg))
+
+    def write(self, path):
+        """Write the file list in 'self.filelist' (presumably as filled in
+        by 'add_defaults()' and 'read_template()') to the manifest file
+        named by 'self.manifest'.
+        """
+        self.sort()
+        self.remove_duplicates()
+        write_file(path, self.files)
+
+    def read(self, path):
+        """Read the manifest file (named by 'self.manifest') and use it to
+        fill in 'self.filelist', the list of files to include in the source
+        distribution.
+        """
+        logging.info("reading manifest file '%s'" % path)
+        manifest = open(path)
+        try:
+            for line in manifest.readlines():
+                self.append(line)
+        finally:
+            manifest.close()
+
+    def exclude_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
+        """Remove strings (presumably filenames) from 'files' that match
+        'pattern'.
+
+        Other parameters are the same as for 'include_pattern()', above.
+        The list 'self.files' is modified in place. Return 1 if files are
+        found.
+        """
+        files_found = 0
+        pattern_re = _translate_pattern(pattern, anchor, prefix, is_regex)
+        for i in range(len(self.files)-1, -1, -1):
+            if pattern_re.search(self.files[i]):
+                del self.files[i]
+                files_found = 1
+
+        return files_found
+
+    #
+    # Private API
+    #
+
+    def _parse_template_line(self, line):
+        words = line.split()
+        action = words[0]
+
+        patterns = dir = dir_pattern = None
+
+        if action in ('include', 'exclude',
+                      'global-include', 'global-exclude'):
+            if len(words) < 2:
+                raise DistutilsTemplateError(
+                      "'%s' expects <pattern1> <pattern2> ..." % action)
+
+            patterns = map(convert_path, words[1:])
+
+        elif action in ('recursive-include', 'recursive-exclude'):
+            if len(words) < 3:
+                raise DistutilsTemplateError(
+                      "'%s' expects <dir> <pattern1> <pattern2> ..." % action)
+
+            dir = convert_path(words[1])
+            patterns = map(convert_path, words[2:])
+
+        elif action in ('graft', 'prune'):
+            if len(words) != 2:
+                raise DistutilsTemplateError(
+                     "'%s' expects a single <dir_pattern>" % action)
+
+            dir_pattern = convert_path(words[1])
+
+        else:
+            raise DistutilsTemplateError("unknown action '%s'" % action)
+
+        return action, patterns, dir, dir_pattern
+
+    def _process_template_line(self, line):
+        # Parse the line: split it up, make sure the right number of words
+        # is there, and return the relevant words.  'action' is always
+        # defined: it's the first word of the line.  Which of the other
+        # three are defined depends on the action; it'll be either
+        # patterns, (dir and patterns), or (dir_pattern).
+        action, patterns, dir, dir_pattern = self._parse_template_line(line)
+
+        # OK, now we know that the action is valid and we have the
+        # right number of words on the line for that action -- so we
+        # can proceed with minimal error-checking.
+        if action == 'include':
+            for pattern in patterns:
+                if not self._include_pattern(pattern, anchor=1):
+                    logging.warning("warning: no files found matching '%s'" %
+                             pattern)
+
+        elif action == 'exclude':
+            for pattern in patterns:
+                if not self.exclude_pattern(pattern, anchor=1):
+                    logging.warning(("warning: no previously-included files "
+                              "found matching '%s'") % pattern)
+
+        elif action == 'global-include':
+            for pattern in patterns:
+                if not self._include_pattern(pattern, anchor=0):
+                    logging.warning(("warning: no files found matching '%s' " +
+                              "anywhere in distribution") % pattern)
+
+        elif action == 'global-exclude':
+            for pattern in patterns:
+                if not self.exclude_pattern(pattern, anchor=0):
+                    logging.warning(("warning: no previously-included files "
+                              "matching '%s' found anywhere in distribution") %
+                             pattern)
+
+        elif action == 'recursive-include':
+            for pattern in patterns:
+                if not self._include_pattern(pattern, prefix=dir):
+                    logging.warning(("warning: no files found matching '%s' "
+                                "under directory '%s'" % (pattern, dir)))
+
+        elif action == 'recursive-exclude':
+            for pattern in patterns:
+                if not self.exclude_pattern(pattern, prefix=dir):
+                    logging.warning(("warning: no previously-included files "
+                              "matching '%s' found under directory '%s'") %
+                             (pattern, dir))
+
+        elif action == 'graft':
+            if not self._include_pattern(None, prefix=dir_pattern):
+                logging.warning("warning: no directories found matching '%s'" %
+                         dir_pattern)
+
+        elif action == 'prune':
+            if not self.exclude_pattern(None, prefix=dir_pattern):
+                logging.warning(("no previously-included directories found " +
+                          "matching '%s'") % dir_pattern)
+        else:
+            raise DistutilsInternalError(
+                  "this cannot happen: invalid action '%s'" % action)
+
+    def _include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
+        """Select strings (presumably filenames) from 'self.files' that
+        match 'pattern', a Unix-style wildcard (glob) pattern.
+
+        Patterns are not quite the same as implemented by the 'fnmatch'
+        module: '*' and '?'  match non-special characters, where "special"
+        is platform-dependent: slash on Unix; colon, slash, and backslash on
+        DOS/Windows; and colon on Mac OS.
+
+        If 'anchor' is true (the default), then the pattern match is more
+        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
+        'anchor' is false, both of these will match.
+
+        If 'prefix' is supplied, then only filenames starting with 'prefix'
+        (itself a pattern) and ending with 'pattern', with anything in between
+        them, will match.  'anchor' is ignored in this case.
+
+        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
+        'pattern' is assumed to be either a string containing a regex or a
+        regex object -- no translation is done, the regex is just compiled
+        and used as-is.
+
+        Selected strings will be added to self.files.
+
+        Return 1 if files are found.
+        """
+        files_found = 0
+        pattern_re = _translate_pattern(pattern, anchor, prefix, is_regex)
+
+        # delayed loading of allfiles list
+        if self.allfiles is None:
+            self.findall()
+
+        for name in self.allfiles:
+            if pattern_re.search(name):
+                self.files.append(name)
+                files_found = 1
+
+        return files_found
+
+
+
+#
+# Utility functions
+#
+
def _findall(dir=os.curdir):
    """Find all files under 'dir' and return the list of full filenames
    (relative to 'dir').

    Regular files are collected; directories are pushed onto a stack and
    traversed iteratively (no recursion).
    """
    from stat import ST_MODE, S_ISREG, S_ISDIR, S_ISLNK

    # 'files' instead of the original 'list', which shadowed the builtin
    files = []
    stack = [dir]
    pop = stack.pop
    push = stack.append

    while stack:
        dir = pop()
        names = os.listdir(dir)

        for name in names:
            if dir != os.curdir:        # avoid the dreaded "./" syndrome
                fullname = os.path.join(dir, name)
            else:
                fullname = name

            # Avoid excess stat calls -- just one will do, thank you!
            mode = os.stat(fullname)[ST_MODE]
            if S_ISREG(mode):
                files.append(fullname)
            elif S_ISDIR(mode) and not S_ISLNK(mode):
                # NOTE(review): os.stat() follows symlinks, so S_ISLNK()
                # can never be true on its result -- symlinked directories
                # are still traversed.  If they must be skipped, os.lstat()
                # is needed here; TODO confirm the intended behavior.
                push(fullname)

    return files
+
+
+
def _glob_to_re(pattern):
    """Translate a shell-like glob pattern to a regular expression.

    Return a string containing the regex.  Differs from
    'fnmatch.translate()' in that '*' does not match "special characters"
    (which are platform-specific).
    """
    # Start from fnmatch's translation, where '?' becomes '.' and
    # '*' becomes '.*'.
    translated = fnmatch.translate(pattern)

    # fnmatch lets '?' and '*' match anything at all, but in a glob they
    # are not supposed to match the path separator -- so rewrite every
    # non-escaped '.' in the regex to match any character *except* the
    # special characters.
    # XXX the only "special character" handled is the slash, i.e. this
    # is Unix-only for now.
    return re.sub(r'((?<!\\)(\\\\)*)\.', r'\1[^/]', translated)
+
+
def _translate_pattern(pattern, anchor=1, prefix=None, is_regex=0):
    """Translate a shell-like wildcard pattern to a compiled regular
    expression.

    Return the compiled regex.  If 'is_regex' true,
    then 'pattern' is directly compiled to a regex (if it's a string)
    or just returned as-is (assumes it's a regex object).
    """
    if is_regex:
        # caller already supplied a regex; compile it if it's a string
        if isinstance(pattern, str):
            return re.compile(pattern)
        return pattern

    if pattern:
        regex = _glob_to_re(pattern)
    else:
        regex = ''

    if prefix is not None:
        # 'prefix' wins over 'anchor': strip the end-of-pattern marker
        # that _glob_to_re() appends, then glue the translated prefix and
        # pattern together with ".*" in between, anchored at the start
        marker_len = len(_glob_to_re(''))
        prefix_re = _glob_to_re(prefix)[:-marker_len]
        regex = "^" + os.path.join(prefix_re, ".*" + regex)
    elif anchor:
        # no prefix -- respect the anchor flag
        regex = "^" + regex

    return re.compile(regex)
diff --git a/src/distutils2/metadata.py b/src/distutils2/metadata.py
--- a/src/distutils2/metadata.py
+++ b/src/distutils2/metadata.py
@@ -13,7 +13,6 @@
 from tokenize import tokenize, NAME, OP, STRING, ENDMARKER
 
 from distutils2.log import warn
-from distutils2.util import rfc822_escape
 from distutils2.version import (is_valid_predicate, is_valid_version,
                                 is_valid_versions)
 from distutils2.errors import (MetadataConflictError,
@@ -25,7 +24,6 @@
     from docutils.parsers.rst import Parser
     from docutils import frontend
     from docutils import nodes
-    from StringIO import StringIO
 
     class SilentReporter(Reporter):
 
diff --git a/src/distutils2/mkpkg.py b/src/distutils2/mkpkg.py
--- a/src/distutils2/mkpkg.py
+++ b/src/distutils2/mkpkg.py
@@ -72,6 +72,8 @@
 ''',
 }
 
+# XXX this list should be asked at PyPI (it changes)
+# then cached, rather than hardcoded
 troveList = [
         'Development Status :: 1 - Planning',
         'Development Status :: 2 - Pre-Alpha',
diff --git a/src/distutils2/tests/__init__.py b/src/distutils2/tests/__init__.py
--- a/src/distutils2/tests/__init__.py
+++ b/src/distutils2/tests/__init__.py
@@ -22,7 +22,6 @@
 
 verbose = 1
 
-
 def test_suite():
     suite = unittest2.TestSuite()
     for fn in os.listdir(here):
@@ -48,9 +47,11 @@
         return result
 
 
-def _run_suite(suite):
+def _run_suite(suite, verbose_=1):
     """Run tests from a unittest2.TestSuite-derived class."""
-    if verbose:
+    global verbose
+    verbose = verbose_
+    if verbose_:
         runner = unittest2.TextTestRunner(sys.stdout, verbosity=2)
     else:
         runner = BasicTestRunner()
@@ -66,7 +67,7 @@
         raise TestFailed(err)
 
 
-def run_unittest(*classes):
+def run_unittest(classes, verbose_=1):
     """Run tests from unittest2.TestCase-derived classes.
 
     Extracted from stdlib test.test_support and modified to support unittest2.
@@ -83,7 +84,7 @@
             suite.addTest(cls)
         else:
             suite.addTest(unittest2.makeSuite(cls))
-    _run_suite(suite)
+    _run_suite(suite, verbose_)
 
 
 def reap_children():
diff --git a/src/distutils2/tests/conversions/01_after.py b/src/distutils2/tests/conversions/01_after.py
new file mode 100644
--- /dev/null
+++ b/src/distutils2/tests/conversions/01_after.py
@@ -0,0 +1,4 @@
+from distutils2.core import setup
+
+setup(name='Foo')
+
diff --git a/src/distutils2/tests/conversions/01_before.py b/src/distutils2/tests/conversions/01_before.py
new file mode 100644
--- /dev/null
+++ b/src/distutils2/tests/conversions/01_before.py
@@ -0,0 +1,4 @@
+from distutils.core import setup
+
+setup(name='Foo')
+
diff --git a/src/distutils2/tests/conversions/02_after.py b/src/distutils2/tests/conversions/02_after.py
new file mode 100644
--- /dev/null
+++ b/src/distutils2/tests/conversions/02_after.py
@@ -0,0 +1,46 @@
+# -*- encoding: utf8 -*-
+import sys
+import os
+from distutils2.core import setup, Extension
+from distutils2.errors import CCompilerError, DistutilsError, CompileError
+from distutils2.command.build_ext import build_ext as distutils_build_ext
+
+VERSION = "0.1"
+
+class build_ext(distutils_build_ext):
+
+    def build_extension(self, ext):
+        try:
+            return distutils_build_ext.build_extension(self, ext)
+        except (CCompilerError, DistutilsError, CompileError), e:
+            pass
+
+def _get_ext_modules():
+    levenshtein = Extension('_levenshtein',
+                            sources=[os.path.join('texttools',
+                                                  '_levenshtein.c')])
+    return [levenshtein]
+
+with open('README.txt') as f:
+    LONG_DESCRIPTION = f.read()
+
+setup(name="TextTools", version=VERSION, author="Tarek Ziade",
+      author_email="tarek at ziade.org",
+      home_page="http://bitbucket.org/tarek/texttools",
+      summary="Text manipulation utilities",
+      description=LONG_DESCRIPTION,
+      keywords="text,guess,levenshtein",
+      classifiers=[
+         'Development Status :: 4 - Beta',
+         'Intended Audience :: Developers',
+         'License :: OSI Approved :: Python Software Foundation License'
+      ],
+      cmdclass={'build_ext': build_ext},
+      packages=['texttools'],
+      package_dir={'texttools': 'texttools'},
+      package_data={'texttools': [os.path.join('samples', '*.txt')]},
+      scripts=[os.path.join('scripts', 'levenshtein.py'),
+               os.path.join('scripts', 'guesslang.py')],
+      ext_modules=_get_ext_modules()
+      )
+
diff --git a/src/distutils2/tests/conversions/02_before.py b/src/distutils2/tests/conversions/02_before.py
new file mode 100644
--- /dev/null
+++ b/src/distutils2/tests/conversions/02_before.py
@@ -0,0 +1,46 @@
+# -*- encoding: utf8 -*-
+import sys
+import os
+from distutils.core import setup, Extension
+from distutils.errors import CCompilerError, DistutilsError, CompileError
+from distutils.command.build_ext import build_ext as distutils_build_ext
+
+VERSION = "0.1"
+
+class build_ext(distutils_build_ext):
+
+    def build_extension(self, ext):
+        try:
+            return distutils_build_ext.build_extension(self, ext)
+        except (CCompilerError, DistutilsError, CompileError), e:
+            pass
+
+def _get_ext_modules():
+    levenshtein = Extension('_levenshtein',
+                            sources=[os.path.join('texttools',
+                                                  '_levenshtein.c')])
+    return [levenshtein]
+
+with open('README.txt') as f:
+    LONG_DESCRIPTION = f.read()
+
+setup(name="TextTools", version=VERSION, author="Tarek Ziade",
+      author_email="tarek at ziade.org",
+      url="http://bitbucket.org/tarek/texttools",
+      description="Text manipulation utilities",
+      long_description=LONG_DESCRIPTION,
+      keywords="text,guess,levenshtein",
+      classifiers=[
+         'Development Status :: 4 - Beta',
+         'Intended Audience :: Developers',
+         'License :: OSI Approved :: Python Software Foundation License'
+      ],
+      cmdclass={'build_ext': build_ext},
+      packages=['texttools'],
+      package_dir={'texttools': 'texttools'},
+      package_data={'texttools': [os.path.join('samples', '*.txt')]},
+      scripts=[os.path.join('scripts', 'levenshtein.py'),
+               os.path.join('scripts', 'guesslang.py')],
+      ext_modules=_get_ext_modules()
+      )
+
diff --git a/src/distutils2/tests/conversions/03_after.py b/src/distutils2/tests/conversions/03_after.py
new file mode 100644
--- /dev/null
+++ b/src/distutils2/tests/conversions/03_after.py
@@ -0,0 +1,93 @@
+##############################################################################
+#
+# Copyright (c) 2006-2009 Zope Corporation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+name = "zc.buildout"
+version = "1.5.0dev"
+
+import os
+from distutils2.core import setup
+
+def read(*rnames):
+    return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
+
+long_description=(
+        read('README.txt')
+        + '\n' +
+        'Detailed Documentation\n'
+        '**********************\n'
+        + '\n' +
+        read('src', 'zc', 'buildout', 'buildout.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'unzip.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'repeatable.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'download.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'downloadcache.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'extends-cache.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'setup.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'update.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'debugging.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'testing.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'easy_install.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'distribute.txt')
+        + '\n' +
+        read('CHANGES.txt')
+        + '\n' +
+        'Download\n'
+        '**********************\n'
+        )
+
+entry_points = """
+[console_scripts]
+buildout = %(name)s.buildout:main
+
+[zc.buildout]
+debug = %(name)s.testrecipes:Debug
+
+""" % dict(name=name)
+
+setup(
+    name = name,
+    version = version,
+    author = "Jim Fulton",
+    author_email = "jim at zope.com",
+    summary = "System for managing development buildouts",
+    description=long_description,
+    license = "ZPL 2.1",
+    keywords = "development build",
+    home_page='http://buildout.org',
+
+    data_files = [('.', ['README.txt'])],
+    packages = ['zc', 'zc.buildout'],
+    package_dir = {'': 'src'},
+    namespace_packages = ['zc'],
+    requires_dist = ['setuptools'],
+    include_package_data = True,
+    entry_points = entry_points,
+    zip_safe=False,
+    classifiers = [
+       'Intended Audience :: Developers',
+       'License :: OSI Approved :: Zope Public License',
+       'Topic :: Software Development :: Build Tools',
+       'Topic :: Software Development :: Libraries :: Python Modules',
+       ],
+    )
diff --git a/src/distutils2/tests/conversions/03_before.py b/src/distutils2/tests/conversions/03_before.py
new file mode 100644
--- /dev/null
+++ b/src/distutils2/tests/conversions/03_before.py
@@ -0,0 +1,93 @@
+##############################################################################
+#
+# Copyright (c) 2006-2009 Zope Corporation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+name = "zc.buildout"
+version = "1.5.0dev"
+
+import os
+from setuptools import setup
+
+def read(*rnames):
+    return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
+
+long_description=(
+        read('README.txt')
+        + '\n' +
+        'Detailed Documentation\n'
+        '**********************\n'
+        + '\n' +
+        read('src', 'zc', 'buildout', 'buildout.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'unzip.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'repeatable.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'download.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'downloadcache.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'extends-cache.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'setup.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'update.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'debugging.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'testing.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'easy_install.txt')
+        + '\n' +
+        read('src', 'zc', 'buildout', 'distribute.txt')
+        + '\n' +
+        read('CHANGES.txt')
+        + '\n' +
+        'Download\n'
+        '**********************\n'
+        )
+
+entry_points = """
+[console_scripts]
+buildout = %(name)s.buildout:main
+
+[zc.buildout]
+debug = %(name)s.testrecipes:Debug
+
+""" % dict(name=name)
+
+setup(
+    name = name,
+    version = version,
+    author = "Jim Fulton",
+    author_email = "jim at zope.com",
+    description = "System for managing development buildouts",
+    long_description=long_description,
+    license = "ZPL 2.1",
+    keywords = "development build",
+    url='http://buildout.org',
+
+    data_files = [('.', ['README.txt'])],
+    packages = ['zc', 'zc.buildout'],
+    package_dir = {'': 'src'},
+    namespace_packages = ['zc'],
+    install_requires = 'setuptools',
+    include_package_data = True,
+    entry_points = entry_points,
+    zip_safe=False,
+    classifiers = [
+       'Intended Audience :: Developers',
+       'License :: OSI Approved :: Zope Public License',
+       'Topic :: Software Development :: Build Tools',
+       'Topic :: Software Development :: Libraries :: Python Modules',
+       ],
+    )
diff --git a/src/distutils2/tests/conversions/04_after.py b/src/distutils2/tests/conversions/04_after.py
new file mode 100644
--- /dev/null
+++ b/src/distutils2/tests/conversions/04_after.py
@@ -0,0 +1,69 @@
+import sys, os
+try:
+    from distutils2.core import setup
+    kw = {'entry_points':
+          """[console_scripts]\nvirtualenv = virtualenv:main\n""",
+          'zip_safe': False}
+except ImportError:
+    from distutils2.core import setup
+    if sys.platform == 'win32':
+        print('Note: without Setuptools installed you will have to use "python -m virtualenv ENV"')
+    else:
+        kw = {'scripts': ['scripts/virtualenv']}
+import re
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+## Figure out the version from virtualenv.py:
+version_re = re.compile(
+    r'virtualenv_version = "(.*?)"')
+fp = open(os.path.join(here, 'virtualenv.py'))
+version = None
+for line in fp:
+    match = version_re.search(line)
+    if match:
+        version = match.group(1)
+        break
+else:
+    raise Exception("Cannot find version in virtualenv.py")
+fp.close()
+
+## Get long_description from index.txt:
+f = open(os.path.join(here, 'docs', 'index.txt'))
+long_description = f.read().strip()
+long_description = long_description.split('split here', 1)[1]
+f.close()
+
+## A warning just for Ian (related to distribution):
+try:
+    import getpass
+except ImportError:
+    is_ianb = False
+else:
+    is_ianb = getpass.getuser() == 'ianb'
+
+if is_ianb and 'register' in sys.argv:
+    if 'hg tip\n~~~~~~' in long_description:
+        print >> sys.stderr, (
+            "WARNING: hg tip is in index.txt")
+
+setup(name='virtualenv',
+      version=version,
+      summary="Virtual Python Environment builder",
+      description=long_description,
+      classifiers=[
+        'Development Status :: 4 - Beta',
+        'Intended Audience :: Developers',
+        'License :: OSI Approved :: MIT License',
+      ],
+      keywords='setuptools deployment installation distutils',
+      author='Ian Bicking',
+      author_email='ianb at colorstudy.com',
+      home_page='http://virtualenv.openplans.org',
+      license='MIT',
+      use_2to3=True,
+      py_modules=['virtualenv'],
+      packages=['virtualenv_support'],
+      package_data={'virtualenv_support': ['*-py%s.egg' % sys.version[:3], '*.tar.gz']},
+      **kw
+      )
diff --git a/src/distutils2/tests/conversions/04_before.py b/src/distutils2/tests/conversions/04_before.py
new file mode 100644
--- /dev/null
+++ b/src/distutils2/tests/conversions/04_before.py
@@ -0,0 +1,69 @@
+import sys, os
+try:
+    from setuptools import setup
+    kw = {'entry_points':
+          """[console_scripts]\nvirtualenv = virtualenv:main\n""",
+          'zip_safe': False}
+except ImportError:
+    from distutils.core import setup
+    if sys.platform == 'win32':
+        print('Note: without Setuptools installed you will have to use "python -m virtualenv ENV"')
+    else:
+        kw = {'scripts': ['scripts/virtualenv']}
+import re
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+## Figure out the version from virtualenv.py:
+version_re = re.compile(
+    r'virtualenv_version = "(.*?)"')
+fp = open(os.path.join(here, 'virtualenv.py'))
+version = None
+for line in fp:
+    match = version_re.search(line)
+    if match:
+        version = match.group(1)
+        break
+else:
+    raise Exception("Cannot find version in virtualenv.py")
+fp.close()
+
+## Get long_description from index.txt:
+f = open(os.path.join(here, 'docs', 'index.txt'))
+long_description = f.read().strip()
+long_description = long_description.split('split here', 1)[1]
+f.close()
+
+## A warning just for Ian (related to distribution):
+try:
+    import getpass
+except ImportError:
+    is_ianb = False
+else:
+    is_ianb = getpass.getuser() == 'ianb'
+
+if is_ianb and 'register' in sys.argv:
+    if 'hg tip\n~~~~~~' in long_description:
+        print >> sys.stderr, (
+            "WARNING: hg tip is in index.txt")
+
+setup(name='virtualenv',
+      version=version,
+      description="Virtual Python Environment builder",
+      long_description=long_description,
+      classifiers=[
+        'Development Status :: 4 - Beta',
+        'Intended Audience :: Developers',
+        'License :: OSI Approved :: MIT License',
+      ],
+      keywords='setuptools deployment installation distutils',
+      author='Ian Bicking',
+      author_email='ianb at colorstudy.com',
+      url='http://virtualenv.openplans.org',
+      license='MIT',
+      use_2to3=True,
+      py_modules=['virtualenv'],
+      packages=['virtualenv_support'],
+      package_data={'virtualenv_support': ['*-py%s.egg' % sys.version[:3], '*.tar.gz']},
+      **kw
+      )
diff --git a/src/distutils2/tests/test_bdist.py b/src/distutils2/tests/test_bdist.py
--- a/src/distutils2/tests/test_bdist.py
+++ b/src/distutils2/tests/test_bdist.py
@@ -31,8 +31,7 @@
         # XXX an explicit list in bdist is
         # not the best way to  bdist_* commands
         # we should add a registry
-        formats = ['rpm', 'zip', 'gztar', 'bztar', 'ztar',
-                   'tar', 'wininst', 'msi']
+        formats = ['zip', 'gztar', 'bztar', 'ztar', 'tar', 'wininst', 'msi']
         formats.sort()
         founded = cmd.format_command.keys()
         founded.sort()
diff --git a/src/distutils2/tests/test_converter.py b/src/distutils2/tests/test_converter.py
new file mode 100644
--- /dev/null
+++ b/src/distutils2/tests/test_converter.py
@@ -0,0 +1,39 @@
+"""Tests for distutils.converter."""
+import os
+import sys
+import unittest2
+from distutils2.converter import DistutilsRefactoringTool
+
+_CURDIR = os.path.dirname(__file__)
+
def _read_file(path):
    """Return the entire content of the file named by 'path'."""
    # distutils2 must stay 2.4-compatible, so no 'with' statement here
    fobj = open(path)
    try:
        content = fobj.read()
    finally:
        fobj.close()
    return content
+
+
class ConverterTestCase(unittest2.TestCase):
    """Check DistutilsRefactoringTool against the conversions/ fixtures."""

    # use version_info, not the lexicographic string comparison
    # "not sys.version < '2.6'" (which misorders e.g. '2.10' vs '2.6')
    @unittest2.skipUnless(sys.version_info >= (2, 6), 'Needs Python >=2.6')
    def test_conversions(self):
        # for each XX_before.py in the conversions/ dir, run the
        # refactoring tool and compare the result with XX_after.py
        ref = DistutilsRefactoringTool()
        convdir = os.path.join(_CURDIR, 'conversions')
        for file_ in os.listdir(convdir):
            if 'after' in file_ or not file_.endswith('py'):
                continue
            original = _read_file(os.path.join(convdir, file_))
            wanted = file_.replace('before', 'after')
            wanted = _read_file(os.path.join(convdir, wanted))
            res = ref.refactor_string(original, 'setup.py')
            self.assertEquals(str(res), wanted)
+
def test_suite():
    """Return a suite containing all tests from this module."""
    return unittest2.makeSuite(ConverterTestCase)

if __name__ == '__main__':
    unittest2.main(defaultTest="test_suite")
diff --git a/src/distutils2/tests/test_core.py b/src/distutils2/tests/test_core.py
--- a/src/distutils2/tests/test_core.py
+++ b/src/distutils2/tests/test_core.py
@@ -60,6 +60,19 @@
         distutils2.core.run_setup(
             self.write_setup(setup_using___file__))
 
+    def test_run_setup_stop_after(self):
+        f = self.write_setup(setup_using___file__)
+        for s in ['init', 'config', 'commandline', 'run']:
+            distutils2.core.run_setup(f, stop_after=s)
+        self.assertRaises(ValueError, distutils2.core.run_setup, 
+                          f, stop_after='bob')
+
+    def test_run_setup_args(self):
+        f = self.write_setup(setup_using___file__)
+        d = distutils2.core.run_setup(f, script_args=["--help"], 
+                                        stop_after="init")
+        self.assertEqual(['--help'], d.script_args)
+
     def test_run_setup_uses_current_dir(self):
         # This tests that the setup script is run with the current directory
         # as its own current directory; this was temporarily broken by a
diff --git a/src/distutils2/tests/test_filelist.py b/src/distutils2/tests/test_filelist.py
deleted file mode 100644
--- a/src/distutils2/tests/test_filelist.py
+++ /dev/null
@@ -1,69 +0,0 @@
-"""Tests for distutils.filelist."""
-from os.path import join
-import unittest2
-from distutils2.tests import captured_stdout
-
-from distutils2.filelist import glob_to_re, FileList
-
-MANIFEST_IN = """\
-include ok
-include xo
-exclude xo
-include foo.tmp
-global-include *.x
-global-include *.txt
-global-exclude *.tmp
-recursive-include f *.oo
-recursive-exclude global *.x
-graft dir
-prune dir3
-"""
-
-class FileListTestCase(unittest2.TestCase):
-
-    # this only works on 2.7
-    def _test_glob_to_re(self):
-        # simple cases
-        self.assertEquals(glob_to_re('foo*'), 'foo[^/]*\\Z(?ms)')
-        self.assertEquals(glob_to_re('foo?'), 'foo[^/]\\Z(?ms)')
-        self.assertEquals(glob_to_re('foo??'), 'foo[^/][^/]\\Z(?ms)')
-
-        # special cases
-        self.assertEquals(glob_to_re(r'foo\\*'), r'foo\\\\[^/]*\Z(?ms)')
-        self.assertEquals(glob_to_re(r'foo\\\*'), r'foo\\\\\\[^/]*\Z(?ms)')
-        self.assertEquals(glob_to_re('foo????'), r'foo[^/][^/][^/][^/]\Z(?ms)')
-        self.assertEquals(glob_to_re(r'foo\\??'), r'foo\\\\[^/][^/]\Z(?ms)')
-
-    def test_process_template_line(self):
-        # testing  all MANIFEST.in template patterns
-        file_list = FileList()
-
-        # simulated file list
-        file_list.allfiles = ['foo.tmp', 'ok', 'xo', 'four.txt',
-                              join('global', 'one.txt'),
-                              join('global', 'two.txt'),
-                              join('global', 'files.x'),
-                              join('global', 'here.tmp'),
-                              join('f', 'o', 'f.oo'),
-                              join('dir', 'graft-one'),
-                              join('dir', 'dir2', 'graft2'),
-                              join('dir3', 'ok'),
-                              join('dir3', 'sub', 'ok.txt')
-                              ]
-
-        for line in MANIFEST_IN.split('\n'):
-            if line.strip() == '':
-                continue
-            file_list.process_template_line(line)
-
-        wanted = ['ok', 'four.txt', join('global', 'one.txt'),
-                  join('global', 'two.txt'), join('f', 'o', 'f.oo'),
-                  join('dir', 'graft-one'), join('dir', 'dir2', 'graft2')]
-
-        self.assertEquals(file_list.files, wanted)
-
-def test_suite():
-    return unittest2.makeSuite(FileListTestCase)
-
-if __name__ == "__main__":
-    unittest2.main(defaultTest="test_suite")
diff --git a/src/distutils2/tests/test_manifest.py b/src/distutils2/tests/test_manifest.py
new file mode 100644
--- /dev/null
+++ b/src/distutils2/tests/test_manifest.py
@@ -0,0 +1,57 @@
+"""Tests for distutils.manifest."""
+import unittest2
+import os
+import sys
+import logging
+
+from distutils2.tests import support
+from distutils2.manifest import Manifest
+
+_MANIFEST = """\
+recursive-include foo *.py   # ok
+# nothing here
+
+#
+
+recursive-include bar \\
+  *.dat   *.txt
+"""
+
+class ManifestTestCase(support.TempdirManager,
+                       unittest2.TestCase):
+
+    def test_manifest_reader(self):
+
+        tmpdir = self.mkdtemp()
+        MANIFEST = os.path.join(tmpdir, 'MANIFEST.in')
+        f = open(MANIFEST, 'w')
+        try:
+            f.write(_MANIFEST)
+        finally:
+            f.close()
+        manifest = Manifest()
+
+        warns = []
+        def _warn(msg):
+            warns.append(msg)
+
+        old_warn = logging.warning
+        logging.warning = _warn
+        try:
+            manifest.read_template(MANIFEST)
+        finally:
+            logging.warning = old_warn
+
+        # the manifest should have been read
+        # and 3 warnings issued (we didn't provide the files)
+        self.assertEquals(len(warns), 3)
+        for warn in warns:
+            self.assertIn('warning: no files found matching', warn)
+
+
+
+def test_suite():
+    return unittest2.makeSuite(ManifestTestCase)
+
+if __name__ == '__main__':
+    run_unittest(test_suite())
diff --git a/src/distutils2/tests/test_sdist.py b/src/distutils2/tests/test_sdist.py
--- a/src/distutils2/tests/test_sdist.py
+++ b/src/distutils2/tests/test_sdist.py
@@ -257,6 +257,9 @@
         cmd.metadata_check = 0
         cmd.run()
         warnings = self.get_logs(WARN)
+        # removing manifest generated warnings
+        warnings = [warn for warn in warnings if
+                    not warn.endswith('-- skipping')]
         self.assertEquals(len(warnings), 0)
 
 
@@ -340,6 +343,19 @@
         finally:
             archive.close()
 
+    def test_get_file_list(self):
+        dist, cmd = self.get_cmd()
+        cmd.finalize_options()
+        cmd.template = os.path.join(self.tmp_dir, 'MANIFEST.in')
+        f = open(cmd.template, 'w')
+        try:
+            f.write('include MANIFEST.in\n')
+        finally:
+            f.close()
+
+        cmd.get_file_list()
+        self.assertIn('MANIFEST.in', cmd.filelist.files)
+
 def test_suite():
     return unittest2.makeSuite(SDistTestCase)
 
diff --git a/src/distutils2/tests/test_text_file.py b/src/distutils2/tests/test_text_file.py
deleted file mode 100644
--- a/src/distutils2/tests/test_text_file.py
+++ /dev/null
@@ -1,88 +0,0 @@
-"""Tests for distutils.text_file."""
-import os
-import unittest2
-from distutils2.text_file import TextFile
-from distutils2.tests import support
-
-TEST_DATA = """# test file
-
-line 3 \\
-# intervening comment
-  continues on next line
-"""
-
-class TextFileTestCase(support.TempdirManager, unittest2.TestCase):
-
-    def test_class(self):
-        # old tests moved from text_file.__main__
-        # so they are really called by the buildbots
-
-        # result 1: no fancy options
-        result1 = ['# test file\n', '\n', 'line 3 \\\n',
-                   '# intervening comment\n',
-                   '  continues on next line\n']
-
-        # result 2: just strip comments
-        result2 = ["\n",
-                   "line 3 \\\n",
-                   "  continues on next line\n"]
-
-        # result 3: just strip blank lines
-        result3 = ["# test file\n",
-                   "line 3 \\\n",
-                   "# intervening comment\n",
-                   "  continues on next line\n"]
-
-        # result 4: default, strip comments, blank lines,
-        # and trailing whitespace
-        result4 = ["line 3 \\",
-                   "  continues on next line"]
-
-        # result 5: strip comments and blanks, plus join lines (but don't
-        # "collapse" joined lines
-        result5 = ["line 3   continues on next line"]
-
-        # result 6: strip comments and blanks, plus join lines (and
-        # "collapse" joined lines
-        result6 = ["line 3 continues on next line"]
-
-        def test_input(count, description, file, expected_result):
-            result = file.readlines()
-            self.assertEquals(result, expected_result)
-
-        tmpdir = self.mkdtemp()
-        filename = os.path.join(tmpdir, "test.txt")
-        out_file = open(filename, "w")
-        try:
-            out_file.write(TEST_DATA)
-        finally:
-            out_file.close()
-
-        in_file = TextFile (filename, strip_comments=0, skip_blanks=0,
-                            lstrip_ws=0, rstrip_ws=0)
-        test_input (1, "no processing", in_file, result1)
-
-        in_file = TextFile (filename, strip_comments=1, skip_blanks=0,
-                            lstrip_ws=0, rstrip_ws=0)
-        test_input (2, "strip comments", in_file, result2)
-
-        in_file = TextFile (filename, strip_comments=0, skip_blanks=1,
-                            lstrip_ws=0, rstrip_ws=0)
-        test_input (3, "strip blanks", in_file, result3)
-
-        in_file = TextFile (filename)
-        test_input (4, "default processing", in_file, result4)
-
-        in_file = TextFile (filename, strip_comments=1, skip_blanks=1,
-                            join_lines=1, rstrip_ws=1)
-        test_input (5, "join lines without collapsing", in_file, result5)
-
-        in_file = TextFile (filename, strip_comments=1, skip_blanks=1,
-                            join_lines=1, rstrip_ws=1, collapse_join=1)
-        test_input (6, "join lines with collapsing", in_file, result6)
-
-def test_suite():
-    return unittest2.makeSuite(TextFileTestCase)
-
-if __name__ == "__main__":
-    unittest2.main(defaultTest="test_suite")
diff --git a/src/distutils2/tests/test_util.py b/src/distutils2/tests/test_util.py
--- a/src/distutils2/tests/test_util.py
+++ b/src/distutils2/tests/test_util.py
@@ -294,7 +294,7 @@
         self.write_file(os.path.join(pkg5, '__init__.py'))
 
         res = find_packages([root], ['pkg1.pkg2'])
-        self.assertEquals(res, ['pkg1', 'pkg5', 'pkg1.pkg3', 'pkg1.pkg3.pkg6'])
+        self.assertEquals(set(res), set(['pkg1', 'pkg5', 'pkg1.pkg3', 'pkg1.pkg3.pkg6']))
 
 
 def test_suite():
diff --git a/src/distutils2/tests/test_version.py b/src/distutils2/tests/test_version.py
--- a/src/distutils2/tests/test_version.py
+++ b/src/distutils2/tests/test_version.py
@@ -139,13 +139,17 @@
         for predicate in predicates:
             v = VersionPredicate(predicate)
 
-        assert VersionPredicate('Hey (>=2.5,<2.7)').match('2.6')
-        assert VersionPredicate('Ho').match('2.6')
-        assert not VersionPredicate('Hey (>=2.5,!=2.6,<2.7)').match('2.6')
-        assert VersionPredicate('Ho (<3.0)').match('2.6')
-        assert VersionPredicate('Ho (<3.0,!=2.5)').match('2.6.0')
-        assert not VersionPredicate('Ho (<3.0,!=2.6)').match('2.6.0')
-
+        self.assertTrue(VersionPredicate('Hey (>=2.5,<2.7)').match('2.6'))
+        self.assertTrue(VersionPredicate('Ho').match('2.6'))
+        self.assertFalse(VersionPredicate('Hey (>=2.5,!=2.6,<2.7)').match('2.6'))
+        self.assertTrue(VersionPredicate('Ho (<3.0)').match('2.6'))
+        self.assertTrue(VersionPredicate('Ho (<3.0,!=2.5)').match('2.6.0'))
+        self.assertFalse(VersionPredicate('Ho (<3.0,!=2.6)').match('2.6.0'))
+        self.assertTrue(VersionPredicate('Ho (2.5)').match('2.5.4'))
+        self.assertFalse(VersionPredicate('Ho (!=2.5)').match('2.5.2'))
+        self.assertTrue(VersionPredicate('Hey (<=2.5)').match('2.5.9'))
+        self.assertFalse(VersionPredicate('Hey (<=2.5)').match('2.6.0'))
+        self.assertTrue(VersionPredicate('Hey (>=2.5)').match('2.5.1'))
 
         # XXX need to silent the micro version in this case
         #assert not VersionPredicate('Ho (<3.0,!=2.6)').match('2.6.3')
diff --git a/src/distutils2/text_file.py b/src/distutils2/text_file.py
deleted file mode 100644
--- a/src/distutils2/text_file.py
+++ /dev/null
@@ -1,304 +0,0 @@
-"""text_file
-
-provides the TextFile class, which gives an interface to text files
-that (optionally) takes care of stripping comments, ignoring blank
-lines, and joining lines with backslashes."""
-
-__revision__ = "$Id: text_file.py 76956 2009-12-21 01:22:46Z tarek.ziade $"
-
-import sys
-
-
-class TextFile:
-
-    """Provides a file-like object that takes care of all the things you
-       commonly want to do when processing a text file that has some
-       line-by-line syntax: strip comments (as long as "#" is your
-       comment character), skip blank lines, join adjacent lines by
-       escaping the newline (ie. backslash at end of line), strip
-       leading and/or trailing whitespace.  All of these are optional
-       and independently controllable.
-
-       Provides a 'warn()' method so you can generate warning messages that
-       report physical line number, even if the logical line in question
-       spans multiple physical lines.  Also provides 'unreadline()' for
-       implementing line-at-a-time lookahead.
-
-       Constructor is called as:
-
-           TextFile (filename=None, file=None, **options)
-
-       It bombs (RuntimeError) if both 'filename' and 'file' are None;
-       'filename' should be a string, and 'file' a file object (or
-       something that provides 'readline()' and 'close()' methods).  It is
-       recommended that you supply at least 'filename', so that TextFile
-       can include it in warning messages.  If 'file' is not supplied,
-       TextFile creates its own using the 'open()' builtin.
-
-       The options are all boolean, and affect the value returned by
-       'readline()':
-         strip_comments [default: true]
-           strip from "#" to end-of-line, as well as any whitespace
-           leading up to the "#" -- unless it is escaped by a backslash
-         lstrip_ws [default: false]
-           strip leading whitespace from each line before returning it
-         rstrip_ws [default: true]
-           strip trailing whitespace (including line terminator!) from
-           each line before returning it
-         skip_blanks [default: true}
-           skip lines that are empty *after* stripping comments and
-           whitespace.  (If both lstrip_ws and rstrip_ws are false,
-           then some lines may consist of solely whitespace: these will
-           *not* be skipped, even if 'skip_blanks' is true.)
-         join_lines [default: false]
-           if a backslash is the last non-newline character on a line
-           after stripping comments and whitespace, join the following line
-           to it to form one "logical line"; if N consecutive lines end
-           with a backslash, then N+1 physical lines will be joined to
-           form one logical line.
-         collapse_join [default: false]
-           strip leading whitespace from lines that are joined to their
-           predecessor; only matters if (join_lines and not lstrip_ws)
-
-       Note that since 'rstrip_ws' can strip the trailing newline, the
-       semantics of 'readline()' must differ from those of the builtin file
-       object's 'readline()' method!  In particular, 'readline()' returns
-       None for end-of-file: an empty string might just be a blank line (or
-       an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is
-       not."""
-
-    default_options = { 'strip_comments': 1,
-                        'skip_blanks':    1,
-                        'lstrip_ws':      0,
-                        'rstrip_ws':      1,
-                        'join_lines':     0,
-                        'collapse_join':  0,
-                      }
-
-    def __init__ (self, filename=None, file=None, **options):
-        """Construct a new TextFile object.  At least one of 'filename'
-           (a string) and 'file' (a file-like object) must be supplied.
-           They keyword argument options are described above and affect
-           the values returned by 'readline()'."""
-
-        if filename is None and file is None:
-            raise RuntimeError, \
-                  "you must supply either or both of 'filename' and 'file'"
-
-        # set values for all options -- either from client option hash
-        # or fallback to default_options
-        for opt in self.default_options.keys():
-            if opt in options:
-                setattr (self, opt, options[opt])
-
-            else:
-                setattr (self, opt, self.default_options[opt])
-
-        # sanity check client option hash
-        for opt in options.keys():
-            if opt not in self.default_options:
-                raise KeyError, "invalid TextFile option '%s'" % opt
-
-        if file is None:
-            self.open (filename)
-        else:
-            self.filename = filename
-            self.file = file
-            self.current_line = 0       # assuming that file is at BOF!
-
-        # 'linebuf' is a stack of lines that will be emptied before we
-        # actually read from the file; it's only populated by an
-        # 'unreadline()' operation
-        self.linebuf = []
-
-
-    def open (self, filename):
-        """Open a new file named 'filename'.  This overrides both the
-           'filename' and 'file' arguments to the constructor."""
-
-        self.filename = filename
-        self.file = open (self.filename, 'r')
-        self.current_line = 0
-
-
-    def close (self):
-        """Close the current file and forget everything we know about it
-           (filename, current line number)."""
-
-        self.file.close ()
-        self.file = None
-        self.filename = None
-        self.current_line = None
-
-
-    def gen_error (self, msg, line=None):
-        outmsg = []
-        if line is None:
-            line = self.current_line
-        outmsg.append(self.filename + ", ")
-        if isinstance(line, (list, tuple)):
-            outmsg.append("lines %d-%d: " % tuple (line))
-        else:
-            outmsg.append("line %d: " % line)
-        outmsg.append(str(msg))
-        return ''.join(outmsg)
-
-
-    def error (self, msg, line=None):
-        raise ValueError, "error: " + self.gen_error(msg, line)
-
-    def warn (self, msg, line=None):
-        """Print (to stderr) a warning message tied to the current logical
-           line in the current file.  If the current logical line in the
-           file spans multiple physical lines, the warning refers to the
-           whole range, eg. "lines 3-5".  If 'line' supplied, it overrides
-           the current line number; it may be a list or tuple to indicate a
-           range of physical lines, or an integer for a single physical
-           line."""
-        sys.stderr.write("warning: " + self.gen_error(msg, line) + "\n")
-
-
-    def readline (self):
-        """Read and return a single logical line from the current file (or
-           from an internal buffer if lines have previously been "unread"
-           with 'unreadline()').  If the 'join_lines' option is true, this
-           may involve reading multiple physical lines concatenated into a
-           single string.  Updates the current line number, so calling
-           'warn()' after 'readline()' emits a warning about the physical
-           line(s) just read.  Returns None on end-of-file, since the empty
-           string can occur if 'rstrip_ws' is true but 'strip_blanks' is
-           not."""
-
-        # If any "unread" lines waiting in 'linebuf', return the top
-        # one.  (We don't actually buffer read-ahead data -- lines only
-        # get put in 'linebuf' if the client explicitly does an
-        # 'unreadline()'.
-        if self.linebuf:
-            line = self.linebuf[-1]
-            del self.linebuf[-1]
-            return line
-
-        buildup_line = ''
-
-        while 1:
-            # read the line, make it None if EOF
-            line = self.file.readline()
-            if line == '': line = None
-
-            if self.strip_comments and line:
-
-                # Look for the first "#" in the line.  If none, never
-                # mind.  If we find one and it's the first character, or
-                # is not preceded by "\", then it starts a comment --
-                # strip the comment, strip whitespace before it, and
-                # carry on.  Otherwise, it's just an escaped "#", so
-                # unescape it (and any other escaped "#"'s that might be
-                # lurking in there) and otherwise leave the line alone.
-
-                pos = line.find("#")
-                if pos == -1:           # no "#" -- no comments
-                    pass
-
-                # It's definitely a comment -- either "#" is the first
-                # character, or it's elsewhere and unescaped.
-                elif pos == 0 or line[pos-1] != "\\":
-                    # Have to preserve the trailing newline, because it's
-                    # the job of a later step (rstrip_ws) to remove it --
-                    # and if rstrip_ws is false, we'd better preserve it!
-                    # (NB. this means that if the final line is all comment
-                    # and has no trailing newline, we will think that it's
-                    # EOF; I think that's OK.)
-                    eol = (line[-1] == '\n') and '\n' or ''
-                    line = line[0:pos] + eol
-
-                    # If all that's left is whitespace, then skip line
-                    # *now*, before we try to join it to 'buildup_line' --
-                    # that way constructs like
-                    #   hello \\
-                    #   # comment that should be ignored
-                    #   there
-                    # result in "hello there".
-                    if line.strip() == "":
-                        continue
-
-                else:                   # it's an escaped "#"
-                    line = line.replace("\\#", "#")
-
-
-            # did previous line end with a backslash? then accumulate
-            if self.join_lines and buildup_line:
-                # oops: end of file
-                if line is None:
-                    self.warn ("continuation line immediately precedes "
-                               "end-of-file")
-                    return buildup_line
-
-                if self.collapse_join:
-                    line = line.lstrip()
-                line = buildup_line + line
-
-                # careful: pay attention to line number when incrementing it
-                if isinstance(self.current_line, list):
-                    self.current_line[1] = self.current_line[1] + 1
-                else:
-                    self.current_line = [self.current_line,
-                                         self.current_line+1]
-            # just an ordinary line, read it as usual
-            else:
-                if line is None:        # eof
-                    return None
-
-                # still have to be careful about incrementing the line number!
-                if isinstance(self.current_line, list):
-                    self.current_line = self.current_line[1] + 1
-                else:
-                    self.current_line = self.current_line + 1
-
-
-            # strip whitespace however the client wants (leading and
-            # trailing, or one or the other, or neither)
-            if self.lstrip_ws and self.rstrip_ws:
-                line = line.strip()
-            elif self.lstrip_ws:
-                line = line.lstrip()
-            elif self.rstrip_ws:
-                line = line.rstrip()
-
-            # blank line (whether we rstrip'ed or not)? skip to next line
-            # if appropriate
-            if (line == '' or line == '\n') and self.skip_blanks:
-                continue
-
-            if self.join_lines:
-                if line[-1] == '\\':
-                    buildup_line = line[:-1]
-                    continue
-
-                if line[-2:] == '\\\n':
-                    buildup_line = line[0:-2] + '\n'
-                    continue
-
-            # well, I guess there's some actual content there: return it
-            return line
-
-    # readline ()
-
-
-    def readlines (self):
-        """Read and return the list of all logical lines remaining in the
-           current file."""
-
-        lines = []
-        while 1:
-            line = self.readline()
-            if line is None:
-                return lines
-            lines.append (line)
-
-
-    def unreadline (self, line):
-        """Push 'line' (a string) onto an internal buffer that will be
-           checked by future 'readline()' calls.  Handy for implementing
-           a parser with line-at-a-time lookahead."""
-
-        self.linebuf.append (line)
diff --git a/src/distutils2/version.py b/src/distutils2/version.py
--- a/src/distutils2/version.py
+++ b/src/distutils2/version.py
@@ -330,10 +330,11 @@
 
     _operators = {"<": lambda x, y: x < y,
                   ">": lambda x, y: x > y,
-                  "<=": lambda x, y: x <= y,
-                  ">=": lambda x, y: x >= y,
-                  "==": lambda x, y: x == y,
-                  "!=": lambda x, y: x != y}
+                  "<=": lambda x, y: str(x).startswith(str(y)) or x < y,
+                  ">=": lambda x, y: str(x).startswith(str(y)) or x > y,
+                  "==": lambda x, y: str(x).startswith(str(y)),
+                  "!=": lambda x, y: not str(x).startswith(str(y)),
+                  }
 
     def __init__(self, predicate):
         predicate = predicate.strip()
diff --git a/src/runtests.py b/src/runtests.py
--- a/src/runtests.py
+++ b/src/runtests.py
@@ -2,17 +2,34 @@
 
 The tests for distutils2 are defined in the distutils2.tests package;
 """
-
-import distutils2.tests
-from distutils2.tests import run_unittest, reap_children
-
-from distutils2._backport.tests import test_suite as btest_suite
+import sys
 
 def test_main():
-    run_unittest(distutils2.tests.test_suite())
-    run_unittest(btest_suite())
-    reap_children()
-
+    import distutils2.tests
+    from distutils2.tests import run_unittest, reap_children, TestFailed
+    from distutils2._backport.tests import test_suite as btest_suite
+    # just supporting -q right now
+    # to enable detailed/quiet output
+    if len(sys.argv) > 1:
+        verbose = sys.argv[-1] != '-q'
+    else:
+        verbose = 1
+    try:
+        try:
+            run_unittest([distutils2.tests.test_suite(), btest_suite()],
+                    verbose_=verbose)
+            return 0
+        except TestFailed:
+            return 1
+    finally:
+        reap_children()
 
 if __name__ == "__main__":
-    test_main()
+    try:
+        import unittest2
+    except ImportError:
+        print('!!! You need to install unittest2')
+        sys.exit(1)
+
+    sys.exit(test_main())
+
diff --git a/src/tests.sh b/src/tests.sh
new file mode 100755
--- /dev/null
+++ b/src/tests.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+echo -n "Running tests for Python 2.4..."
+python2.4 runtests.py -q > /dev/null 2> /dev/null
+if [ $? -ne 0 ];then
+    echo "Failed"
+    exit $1
+else
+    echo "Success"
+fi
+
+echo -n "Running tests for Python 2.5..."
+python2.5 runtests.py -q > /dev/null 2> /dev/null
+if [ $? -ne 0 ];then
+    echo "Failed"
+    exit $1
+else
+    echo "Success"
+fi
+
+echo -n "Running tests for Python 2.6..."
+python2.6 runtests.py -q > /dev/null 2> /dev/null
+if [ $? -ne 0 ];then
+    echo "Failed"
+    exit $1
+else
+    echo "Success"
+fi
+

--
Repository URL: http://hg.python.org/distutils2


More information about the Python-checkins mailing list