[Python-checkins] distutils2: Merge branch 'master' of /Users/gotcha/co/distutils2/git

tarek.ziade python-checkins at python.org
Wed Feb 16 22:23:55 CET 2011


tarek.ziade pushed e65761262dbf to distutils2:

http://hg.python.org/distutils2/rev/e65761262dbf
changeset:   996:e65761262dbf
parent:      995:7bbd7533bc87
parent:      994:f314fe720c70
user:        Godefroid Chapelle <gotcha at bubblenet.be>
date:        Sun Jan 30 12:30:49 2011 +0100
summary:
  Merge branch 'master' of /Users/gotcha/co/distutils2/git

files:
  

diff --git a/distutils2/_backport/__init__.py b/distutils2/_backport/__init__.py
--- a/distutils2/_backport/__init__.py
+++ b/distutils2/_backport/__init__.py
@@ -1,8 +1,2 @@
 """Things that will land in the Python 3.3 std lib but which we must drag along
 with us for now to support 2.x."""
-
-def any(seq):
-    for elem in seq:
-        if elem:
-            return True
-    return False
diff --git a/distutils2/_backport/pkgutil.py b/distutils2/_backport/pkgutil.py
--- a/distutils2/_backport/pkgutil.py
+++ b/distutils2/_backport/pkgutil.py
@@ -1,24 +1,19 @@
 """Utilities to support packages."""
 
-# NOTE: This module must remain compatible with Python 2.3, as it is shared
-# by setuptools for distribution with Python 2.3 and up.
-
 import os
 import sys
 import imp
-import os.path
+import re
+import warnings
 from csv import reader as csv_reader
 from types import ModuleType
 from distutils2.errors import DistutilsError
 from distutils2.metadata import DistributionMetadata
 from distutils2.version import suggest_normalized_version, VersionPredicate
-import zipimport
 try:
     import cStringIO as StringIO
 except ImportError:
     import StringIO
-import re
-import warnings
 
 
 __all__ = [
@@ -28,10 +23,14 @@
     'Distribution', 'EggInfoDistribution', 'distinfo_dirname',
     'get_distributions', 'get_distribution', 'get_file_users',
     'provides_distribution', 'obsoletes_distribution',
-    'enable_cache', 'disable_cache', 'clear_cache'
+    'enable_cache', 'disable_cache', 'clear_cache',
 ]
 
 
+##########################
+# PEP 302 Implementation #
+##########################
+
 def read_code(stream):
     # This helper is needed in order for the :pep:`302` emulation to
     # correctly handle compiled files
@@ -41,7 +40,7 @@
     if magic != imp.get_magic():
         return None
 
-    stream.read(4) # Skip timestamp
+    stream.read(4)  # Skip timestamp
     return marshal.load(stream)
 
 
@@ -173,7 +172,6 @@
 
 #@simplegeneric
 def iter_importer_modules(importer, prefix=''):
-    ""
     if not hasattr(importer, 'iter_modules'):
         return []
     return importer.iter_modules(prefix)
@@ -331,9 +329,9 @@
     def get_filename(self, fullname=None):
         fullname = self._fix_name(fullname)
         mod_type = self.etc[2]
-        if self.etc[2] == imp.PKG_DIRECTORY:
+        if mod_type == imp.PKG_DIRECTORY:
             return self._get_delegate().get_filename()
-        elif self.etc[2] in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
+        elif mod_type in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
             return self.filename
         return None
 
@@ -432,7 +430,8 @@
     import mechanism will find the latter.
 
     Items of the following types can be affected by this discrepancy:
-        ``imp.C_EXTENSION, imp.PY_SOURCE, imp.PY_COMPILED, imp.PKG_DIRECTORY``
+    :data:`imp.C_EXTENSION`, :data:`imp.PY_SOURCE`, :data:`imp.PY_COMPILED`,
+    :data:`imp.PKG_DIRECTORY`
     """
     if fullname.startswith('.'):
         raise ImportError("Relative module names not supported")
@@ -534,13 +533,13 @@
         # frozen package.  Return the path unchanged in that case.
         return path
 
-    pname = os.path.join(*name.split('.')) # Reconstitute as relative path
+    pname = os.path.join(*name.split('.'))  # Reconstitute as relative path
     # Just in case os.extsep != '.'
     sname = os.extsep.join(name.split('.'))
     sname_pkg = sname + os.extsep + "pkg"
     init_py = "__init__" + os.extsep + "py"
 
-    path = path[:] # Start with a copy of the existing path
+    path = path[:]  # Start with a copy of the existing path
 
     for dir in sys.path:
         if not isinstance(dir, basestring) or not os.path.isdir(dir):
@@ -565,7 +564,7 @@
                     line = line.rstrip('\n')
                     if not line or line.startswith('#'):
                         continue
-                    path.append(line) # Don't check for existence!
+                    path.append(line)  # Don't check for existence!
                 f.close()
 
     return path
@@ -609,6 +608,7 @@
     resource_name = os.path.join(*parts)
     return loader.get_data(resource_name)
 
+
 ##########################
 # PEP 376 Implementation #
 ##########################
@@ -616,12 +616,12 @@
 DIST_FILES = ('INSTALLER', 'METADATA', 'RECORD', 'REQUESTED',)
 
 # Cache
-_cache_name = {} # maps names to Distribution instances
-_cache_name_egg = {} # maps names to EggInfoDistribution instances
-_cache_path = {} # maps paths to Distribution instances
-_cache_path_egg = {} # maps paths to EggInfoDistribution instances
-_cache_generated = False # indicates if .dist-info distributions are cached
-_cache_generated_egg = False # indicates if .dist-info and .egg are cached
+_cache_name = {}  # maps names to Distribution instances
+_cache_name_egg = {}  # maps names to EggInfoDistribution instances
+_cache_path = {}  # maps paths to Distribution instances
+_cache_path_egg = {}  # maps paths to EggInfoDistribution instances
+_cache_generated = False  # indicates if .dist-info distributions are cached
+_cache_generated_egg = False  # indicates if .dist-info and .egg are cached
 _cache_enabled = True
 
 
@@ -636,6 +636,7 @@
 
     _cache_enabled = True
 
+
 def disable_cache():
     """
     Disables the internal cache.
@@ -647,9 +648,10 @@
 
     _cache_enabled = False
 
+
 def clear_cache():
     """ Clears the internal cache. """
-    global _cache_name, _cache_name_egg, cache_path, _cache_path_egg, \
+    global _cache_name, _cache_name_egg, _cache_path, _cache_path_egg, \
            _cache_generated, _cache_generated_egg
 
     _cache_name = {}
@@ -660,14 +662,14 @@
     _cache_generated_egg = False
 
 
-def _yield_distributions(include_dist, include_egg):
+def _yield_distributions(include_dist, include_egg, paths=sys.path):
     """
     Yield .dist-info and .egg(-info) distributions, based on the arguments
 
     :parameter include_dist: yield .dist-info distributions
     :parameter include_egg: yield .egg(-info) distributions
     """
-    for path in sys.path:
+    for path in paths:
         realpath = os.path.realpath(path)
         if not os.path.isdir(realpath):
             continue
@@ -679,7 +681,7 @@
                                   dir.endswith('.egg')):
                 yield EggInfoDistribution(dist_path)
 
-def _generate_cache(use_egg_info=False):
+def _generate_cache(use_egg_info=False, paths=sys.path):
     global _cache_generated, _cache_generated_egg
 
     if _cache_generated_egg or (_cache_generated and not use_egg_info):
@@ -688,7 +690,7 @@
         gen_dist = not _cache_generated
         gen_egg = use_egg_info
 
-        for dist in _yield_distributions(gen_dist, gen_egg):
+        for dist in _yield_distributions(gen_dist, gen_egg, paths):
             if isinstance(dist, Distribution):
                 _cache_path[dist.path] = dist
                 if not dist.name in _cache_name:
@@ -872,7 +874,8 @@
             if isinstance(strs, basestring):
                 for s in strs.splitlines():
                     s = s.strip()
-                    if s and not s.startswith('#'): # skip blank lines/comments
+                    # skip blank lines/comments
+                    if s and not s.startswith('#'):
                         yield s
             else:
                 for ss in strs:
@@ -890,6 +893,7 @@
                 except IOError:
                     requires = None
             else:
+                # FIXME handle the case where zipfile is not available
                 zipf = zipimport.zipimporter(path)
                 fileobj = StringIO.StringIO(zipf.get_data('EGG-INFO/PKG-INFO'))
                 self.metadata = DistributionMetadata(fileobj=fileobj)
@@ -952,7 +956,7 @@
                             version = match.group('first')
                             if match.group('rest'):
                                 version += match.group('rest')
-                            version = version.replace(' ', '') # trim spaces
+                            version = version.replace(' ', '')  # trim spaces
                         if version is None:
                             reqs.append(name)
                         else:
@@ -982,12 +986,6 @@
     __hash__ = object.__hash__
 
 
-def _normalize_dist_name(name):
-    """Returns a normalized name from the given *name*.
-    :rtype: string"""
-    return name.replace('-', '_')
-
-
 def distinfo_dirname(name, version):
     """
     The *name* and *version* parameters are converted into their
@@ -1007,7 +1005,7 @@
     :returns: directory name
     :rtype: string"""
     file_extension = '.dist-info'
-    name = _normalize_dist_name(name)
+    name = name.replace('-', '_')
     normalized_version = suggest_normalized_version(version)
     # Because this is a lookup procedure, something will be returned even if
     #   it is a version that cannot be normalized
@@ -1017,7 +1015,7 @@
     return '-'.join([name, normalized_version]) + file_extension
 
 
-def get_distributions(use_egg_info=False):
+def get_distributions(use_egg_info=False, paths=sys.path):
     """
     Provides an iterator that looks for ``.dist-info`` directories in
     ``sys.path`` and returns :class:`Distribution` instances for each one of
@@ -1028,7 +1026,7 @@
             instances
     """
     if not _cache_enabled:
-        for dist in _yield_distributions(True, use_egg_info):
+        for dist in _yield_distributions(True, use_egg_info, paths):
             yield dist
     else:
         _generate_cache(use_egg_info)
@@ -1041,7 +1039,7 @@
                 yield dist
 
 
-def get_distribution(name, use_egg_info=False):
+def get_distribution(name, use_egg_info=False, paths=sys.path):
     """
     Scans all elements in ``sys.path`` and looks for all directories
     ending with ``.dist-info``. Returns a :class:`Distribution`
@@ -1059,7 +1057,7 @@
     :rtype: :class:`Distribution` or :class:`EggInfoDistribution` or None
     """
     if not _cache_enabled:
-        for dist in _yield_distributions(True, use_egg_info):
+        for dist in _yield_distributions(True, use_egg_info, paths):
             if dist.name == name:
                 return dist
     else:
@@ -1148,7 +1146,7 @@
                     raise DistutilsError(('Distribution %s has invalid ' +
                                           'provides field: %s') \
                                            % (dist.name, p))
-                p_ver = p_ver[1:-1] # trim off the parenthesis
+                p_ver = p_ver[1:-1]  # trim off the parenthesis
                 if p_name == name and predicate.match(p_ver):
                     yield dist
                     break
diff --git a/distutils2/_backport/shutil.py b/distutils2/_backport/shutil.py
--- a/distutils2/_backport/shutil.py
+++ b/distutils2/_backport/shutil.py
@@ -1,4 +1,4 @@
-"""Utility functions for copying files and directory trees.
+"""Utility functions for copying and archiving files and directory trees.
 
 XXX The functions here don't copy the resource fork or other metadata on Mac.
 
@@ -9,7 +9,13 @@
 import stat
 from os.path import abspath
 import fnmatch
-from warnings import warn
+import errno
+
+try:
+    import bz2
+    _BZ2_SUPPORTED = True
+except ImportError:
+    _BZ2_SUPPORTED = False
 
 try:
     from pwd import getpwnam
@@ -21,9 +27,12 @@
 except ImportError:
     getgrnam = None
 
-__all__ = ["copyfileobj","copyfile","copymode","copystat","copy","copy2",
-           "copytree","move","rmtree","Error", "SpecialFileError",
-           "ExecError","make_archive"]
+__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
+           "copytree", "move", "rmtree", "Error", "SpecialFileError",
+           "ExecError", "make_archive", "get_archive_formats",
+           "register_archive_format", "unregister_archive_format",
+           "get_unpack_formats", "register_unpack_format",
+           "unregister_unpack_format", "unpack_archive"]
 
 class Error(EnvironmentError):
     pass
@@ -35,6 +44,14 @@
 class ExecError(EnvironmentError):
     """Raised when a command could not be executed"""
 
+class ReadError(EnvironmentError):
+    """Raised when an archive cannot be read"""
+
+class RegistryError(Exception):
+    """Raised when a registry operation with the archiving
+    and unpacking registries fails"""
+
+
 try:
     WindowsError
 except NameError:
@@ -50,7 +67,7 @@
 
 def _samefile(src, dst):
     # Macintosh, Unix.
-    if hasattr(os.path,'samefile'):
+    if hasattr(os.path, 'samefile'):
         try:
             return os.path.samefile(src, dst)
         except OSError:
@@ -63,10 +80,8 @@
 def copyfile(src, dst):
     """Copy data from src to dst"""
     if _samefile(src, dst):
-        raise Error, "`%s` and `%s` are the same file" % (src, dst)
+        raise Error("`%s` and `%s` are the same file" % (src, dst))
 
-    fsrc = None
-    fdst = None
     for fn in [src, dst]:
         try:
             st = os.stat(fn)
@@ -77,15 +92,16 @@
             # XXX What about other special files? (sockets, devices...)
             if stat.S_ISFIFO(st.st_mode):
                 raise SpecialFileError("`%s` is a named pipe" % fn)
+
+    fsrc = open(src, 'rb')
     try:
-        fsrc = open(src, 'rb')
         fdst = open(dst, 'wb')
-        copyfileobj(fsrc, fdst)
+        try:
+            copyfileobj(fsrc, fdst)
+        finally:
+            fdst.close()
     finally:
-        if fdst:
-            fdst.close()
-        if fsrc:
-            fsrc.close()
+        fsrc.close()
 
 def copymode(src, dst):
     """Copy mode bits from src to dst"""
@@ -103,8 +119,12 @@
     if hasattr(os, 'chmod'):
         os.chmod(dst, mode)
     if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
-        os.chflags(dst, st.st_flags)
-
+        try:
+            os.chflags(dst, st.st_flags)
+        except OSError, why:
+            if (not hasattr(errno, 'EOPNOTSUPP') or
+                why.errno != errno.EOPNOTSUPP):
+                raise
 
 def copy(src, dst):
     """Copy data and mode bits ("cp src dst").
@@ -140,8 +160,9 @@
         return set(ignored_names)
     return _ignore_patterns
 
-def copytree(src, dst, symlinks=False, ignore=None):
-    """Recursively copy a directory tree using copy2().
+def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
+             ignore_dangling_symlinks=False):
+    """Recursively copy a directory tree.
 
     The destination directory must not already exist.
     If exception(s) occur, an Error is raised with a list of reasons.
@@ -149,7 +170,13 @@
     If the optional symlinks flag is true, symbolic links in the
     source tree result in symbolic links in the destination tree; if
     it is false, the contents of the files pointed to by symbolic
-    links are copied.
+    links are copied. If the file pointed to by the symlink doesn't
+    exist, an exception will be added in the list of errors raised in
+    an Error exception at the end of the copy process.
+
+    You can set the optional ignore_dangling_symlinks flag to true if you
+    want to silence this exception. Notice that this has no effect on
+    platforms that don't support os.symlink.
 
     The optional ignore argument is a callable. If given, it
     is called with the `src` parameter, which is the directory
@@ -163,7 +190,10 @@
     list of names relative to the `src` directory that should
     not be copied.
 
-    XXX Consider this example code rather than the ultimate tool.
+    The optional copy_function argument is a callable that will be used
+    to copy each file. It will be called with the source path and the
+    destination path as arguments. By default, copy2() is used, but any
+    function that supports the same signature (like copy()) can be used.
 
     """
     names = os.listdir(src)
@@ -182,14 +212,21 @@
         srcname = os.path.join(src, name)
         dstname = os.path.join(dst, name)
         try:
-            if symlinks and os.path.islink(srcname):
+            if os.path.islink(srcname):
                 linkto = os.readlink(srcname)
-                os.symlink(linkto, dstname)
+                if symlinks:
+                    os.symlink(linkto, dstname)
+                else:
+                    # ignore dangling symlink if the flag is on
+                    if not os.path.exists(linkto) and ignore_dangling_symlinks:
+                        continue
+                    # otherwise let the copy occur; copy2 will raise an error
+                    copy_function(srcname, dstname)
             elif os.path.isdir(srcname):
-                copytree(srcname, dstname, symlinks, ignore)
+                copytree(srcname, dstname, symlinks, ignore, copy_function)
             else:
                 # Will raise a SpecialFileError for unsupported file types
-                copy2(srcname, dstname)
+                copy_function(srcname, dstname)
         # catch the Error from the recursive copytree so that we can
         # continue with other files
         except Error, err:
@@ -205,7 +242,7 @@
         else:
             errors.extend((src, dst, str(why)))
     if errors:
-        raise Error, errors
+        raise Error(errors)
 
 def rmtree(path, ignore_errors=False, onerror=None):
     """Recursively delete a directory tree.
@@ -235,7 +272,7 @@
     names = []
     try:
         names = os.listdir(path)
-    except os.error, err:
+    except os.error:
         onerror(os.listdir, path, sys.exc_info())
     for name in names:
         fullname = os.path.join(path, name)
@@ -248,7 +285,7 @@
         else:
             try:
                 os.remove(fullname)
-            except os.error, err:
+            except os.error:
                 onerror(os.remove, fullname, sys.exc_info())
     try:
         os.rmdir(path)
@@ -282,13 +319,13 @@
     if os.path.isdir(dst):
         real_dst = os.path.join(dst, _basename(src))
         if os.path.exists(real_dst):
-            raise Error, "Destination path '%s' already exists" % real_dst
+            raise Error("Destination path '%s' already exists" % real_dst)
     try:
         os.rename(src, real_dst)
     except OSError:
         if os.path.isdir(src):
             if _destinsrc(src, dst):
-                raise Error, "Cannot move a directory '%s' into itself '%s'." % (src, dst)
+                raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst))
             copytree(src, real_dst, symlinks=True)
             rmtree(src)
         else:
@@ -333,40 +370,41 @@
     """Create a (possibly compressed) tar file from all the files under
     'base_dir'.
 
-    'compress' must be "gzip" (the default), "compress", "bzip2", or None.
-    (compress will be deprecated in Python 3.2)
+    'compress' must be "gzip" (the default), "bzip2", or None.
 
     'owner' and 'group' can be used to define an owner and a group for the
     archive that is being built. If not provided, the current owner and group
     will be used.
 
     The output tar file will be named 'base_dir' +  ".tar", possibly plus
-    the appropriate compression extension (".gz", ".bz2" or ".Z").
+    the appropriate compression extension (".gz", or ".bz2").
 
     Returns the output filename.
     """
-    tar_compression = {'gzip': 'gz', 'bzip2': 'bz2', None: '', 'compress': ''}
-    compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'compress': '.Z'}
+    tar_compression = {'gzip': 'gz', None: ''}
+    compress_ext = {'gzip': '.gz'}
+
+    if _BZ2_SUPPORTED:
+        tar_compression['bzip2'] = 'bz2'
+        compress_ext['bzip2'] = '.bz2'
 
     # flags for compression program, each element of list will be an argument
     if compress is not None and compress not in compress_ext:
-        raise ValueError, \
-              ("bad value for 'compress': must be None, 'gzip', 'bzip2' "
-               "or 'compress'")
+        raise ValueError("bad value for 'compress', or compression format not "
+                         "supported: %s" % compress)
 
-    archive_name = base_name + '.tar'
-    if compress != 'compress':
-        archive_name += compress_ext.get(compress, '')
+    archive_name = base_name + '.tar' + compress_ext.get(compress, '')
+    archive_dir = os.path.dirname(archive_name)
 
-    archive_dir = os.path.dirname(archive_name)
     if not os.path.exists(archive_dir):
         if logger is not None:
-            logger.info("creating %s" % archive_dir)
+            logger.info("creating %s", archive_dir)
         if not dry_run:
             os.makedirs(archive_dir)
 
-
     # creating the tarball
+    # XXX late import because of circular dependency between shutil and
+    # tarfile :(
     from distutils2._backport import tarfile
 
     if logger is not None:
@@ -391,23 +429,9 @@
         finally:
             tar.close()
 
-    # compression using `compress`
-    # XXX this block will be removed in Python 3.2
-    if compress == 'compress':
-        warn("'compress' will be deprecated.", PendingDeprecationWarning)
-        # the option varies depending on the platform
-        compressed_name = archive_name + compress_ext[compress]
-        if sys.platform == 'win32':
-            cmd = [compress, archive_name, compressed_name]
-        else:
-            cmd = [compress, '-f', archive_name]
-        from distutils2.spawn import spawn
-        spawn(cmd, dry_run=dry_run)
-        return compressed_name
-
     return archive_name
 
-def _call_external_zip(directory, verbose=False):
+def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False):
     # XXX see if we want to keep an external call here
     if verbose:
         zipoptions = "-r"
@@ -420,8 +444,7 @@
     except DistutilsExecError:
         # XXX really should distinguish between "couldn't find
         # external 'zip' command" and "zip failed".
-        raise ExecError, \
-            ("unable to create zip file '%s': "
+        raise ExecError("unable to create zip file '%s': "
             "could neither import the 'zipfile' module nor "
             "find a standalone zip utility") % zip_filename
 
@@ -451,7 +474,7 @@
         zipfile = None
 
     if zipfile is None:
-        _call_external_zip(base_dir, verbose)
+        _call_external_zip(base_dir, zip_filename, verbose, dry_run)
     else:
         if logger is not None:
             logger.info("creating '%s' and adding '%s' to it",
@@ -475,12 +498,14 @@
 _ARCHIVE_FORMATS = {
     'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
     'bztar': (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
-    'ztar':  (_make_tarball, [('compress', 'compress')],
-                "compressed tar file"),
     'tar':   (_make_tarball, [('compress', None)], "uncompressed tar file"),
-    'zip':   (_make_zipfile, [],"ZIP file")
+    'zip':   (_make_zipfile, [], "ZIP file"),
     }
 
+if _BZ2_SUPPORTED:
+    _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')],
+                                "bzip2'ed tar-file")
+
 def get_archive_formats():
     """Returns a list of supported formats for archiving and unarchiving.
 
@@ -507,7 +532,7 @@
     if not isinstance(extra_args, (tuple, list)):
         raise TypeError('extra_args needs to be a sequence')
     for element in extra_args:
-        if not isinstance(element, (tuple, list)) or len(element) !=2 :
+        if not isinstance(element, (tuple, list)) or len(element) !=2:
             raise TypeError('extra_args elements are : (arg_name, value)')
 
     _ARCHIVE_FORMATS[name] = (function, extra_args, description)
@@ -520,7 +545,7 @@
     """Create an archive file (eg. zip or tar).
 
     'base_name' is the name of the file to create, minus any format-specific
-    extension; 'format' is the archive format: one of "zip", "tar", "ztar",
+    extension; 'format' is the archive format: one of "zip", "tar", "bztar"
     or "gztar".
 
     'root_dir' is a directory that will be the root directory of the
@@ -549,7 +574,7 @@
     try:
         format_info = _ARCHIVE_FORMATS[format]
     except KeyError:
-        raise ValueError, "unknown archive format '%s'" % format
+        raise ValueError("unknown archive format '%s'" % format)
 
     func = format_info[0]
     for arg, val in format_info[1]:
@@ -568,3 +593,169 @@
             os.chdir(save_cwd)
 
     return filename
+
+
+def get_unpack_formats():
+    """Returns a list of supported formats for unpacking.
+
+    Each element of the returned sequence is a tuple
+    (name, extensions, description)
+    """
+    formats = [(name, info[0], info[3]) for name, info in
+               _UNPACK_FORMATS.iteritems()]
+    formats.sort()
+    return formats
+
+def _check_unpack_options(extensions, function, extra_args):
+    """Checks what gets registered as an unpacker."""
+    # first make sure no other unpacker is registered for this extension
+    existing_extensions = {}
+    for name, info in _UNPACK_FORMATS.iteritems():
+        for ext in info[0]:
+            existing_extensions[ext] = name
+
+    for extension in extensions:
+        if extension in existing_extensions:
+            msg = '%s is already registered for "%s"'
+            raise RegistryError(msg % (extension,
+                                       existing_extensions[extension]))
+
+    if not callable(function):
+        raise TypeError('The registered function must be a callable')
+
+
+def register_unpack_format(name, extensions, function, extra_args=None,
+                           description=''):
+    """Registers an unpack format.
+
+    `name` is the name of the format. `extensions` is a list of extensions
+    corresponding to the format.
+
+    `function` is the callable that will be
+    used to unpack archives. The callable will receive archives to unpack.
+    If it's unable to handle an archive, it needs to raise a ReadError
+    exception.
+
+    If provided, `extra_args` is a sequence of
+    (name, value) tuples that will be passed as arguments to the callable.
+    description can be provided to describe the format, and will be returned
+    by the get_unpack_formats() function.
+    """
+    if extra_args is None:
+        extra_args = []
+    _check_unpack_options(extensions, function, extra_args)
+    _UNPACK_FORMATS[name] = extensions, function, extra_args, description
+
+def unregister_unpack_format(name):
+    """Removes the unpack format from the registry."""
+    del _UNPACK_FORMATS[name]
+
+def _ensure_directory(path):
+    """Ensure that the parent directory of `path` exists"""
+    dirname = os.path.dirname(path)
+    if not os.path.isdir(dirname):
+        os.makedirs(dirname)
+
+def _unpack_zipfile(filename, extract_dir):
+    """Unpack zip `filename` to `extract_dir`
+    """
+    try:
+        import zipfile
+    except ImportError:
+        raise ReadError('zlib not supported, cannot unpack this archive.')
+
+    if not zipfile.is_zipfile(filename):
+        raise ReadError("%s is not a zip file" % filename)
+
+    zip = zipfile.ZipFile(filename)
+    try:
+        for info in zip.infolist():
+            name = info.filename
+
+            # don't extract absolute paths or ones with .. in them
+            if name.startswith('/') or '..' in name:
+                continue
+
+            target = os.path.join(extract_dir, *name.split('/'))
+            if not target:
+                continue
+
+            _ensure_directory(target)
+            if not name.endswith('/'):
+                # file
+                data = zip.read(info.filename)
+                f = open(target, 'wb')
+                try:
+                    f.write(data)
+                finally:
+                    f.close()
+                    del data
+    finally:
+        zip.close()
+
+def _unpack_tarfile(filename, extract_dir):
+    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
+    """
+    from distutils2._backport import tarfile
+    try:
+        tarobj = tarfile.open(filename)
+    except tarfile.TarError:
+        raise ReadError(
+            "%s is not a compressed or uncompressed tar file" % filename)
+    try:
+        tarobj.extractall(extract_dir)
+    finally:
+        tarobj.close()
+
+_UNPACK_FORMATS = {
+    'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"),
+    'tar':   (['.tar'], _unpack_tarfile, [], "uncompressed tar file"),
+    'zip':   (['.zip'], _unpack_zipfile, [], "ZIP file")
+    }
+
+if _BZ2_SUPPORTED:
+    _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [],
+                                "bzip2'ed tar-file")
+
+def _find_unpack_format(filename):
+    for name, info in _UNPACK_FORMATS.iteritems():
+        for extension in info[0]:
+            if filename.endswith(extension):
+                return name
+    return None
+
+def unpack_archive(filename, extract_dir=None, format=None):
+    """Unpack an archive.
+
+    `filename` is the name of the archive.
+
+    `extract_dir` is the name of the target directory, where the archive
+    is unpacked. If not provided, the current working directory is used.
+
+    `format` is the archive format: one of "zip", "tar", or "gztar". Or any
+    other registered format. If not provided, unpack_archive will use the
+    filename extension and see if an unpacker was registered for that
+    extension.
+
+    In case none is found, a ValueError is raised.
+    """
+    if extract_dir is None:
+        extract_dir = os.getcwd()
+
+    if format is not None:
+        try:
+            format_info = _UNPACK_FORMATS[format]
+        except KeyError:
+            raise ValueError("Unknown unpack format '{0}'".format(format))
+
+        func = format_info[0]
+        func(filename, extract_dir, **dict(format_info[1]))
+    else:
+        # we need to look at the registered unpackers supported extensions
+        format = _find_unpack_format(filename)
+        if format is None:
+            raise ReadError("Unknown archive format '{0}'".format(filename))
+
+        func = _UNPACK_FORMATS[format][1]
+        kwargs = dict(_UNPACK_FORMATS[format][2])
+    raise ValueError('Unknown archive format: %s' % filename)
diff --git a/distutils2/_backport/tests/test_pkgutil.py b/distutils2/_backport/tests/test_pkgutil.py
--- a/distutils2/_backport/tests/test_pkgutil.py
+++ b/distutils2/_backport/tests/test_pkgutil.py
@@ -12,10 +12,15 @@
 except ImportError:
     from distutils2._backport.hashlib import md5
 
-from test.test_support import TESTFN
+from distutils2.errors import DistutilsError
+from distutils2.metadata import DistributionMetadata
+from distutils2.tests import unittest, run_unittest, support
 
-from distutils2.tests import unittest, run_unittest, support
 from distutils2._backport import pkgutil
+from distutils2._backport.pkgutil import (
+    Distribution, EggInfoDistribution, get_distribution, get_distributions,
+    provides_distribution, obsoletes_distribution, get_file_users,
+    distinfo_dirname, _yield_distributions)
 
 try:
     from os.path import relpath
@@ -108,6 +113,12 @@
         self.assertEqual(res1, RESOURCE_DATA)
         res2 = pkgutil.get_data(pkg, 'sub/res.txt')
         self.assertEqual(res2, RESOURCE_DATA)
+
+        names = []
+        for loader, name, ispkg in pkgutil.iter_modules([zip_file]):
+            names.append(name)
+        self.assertEqual(names, ['test_getdata_zipfile'])
+
         del sys.path[0]
 
         del sys.modules[pkg]
@@ -205,7 +216,7 @@
                 record_writer.writerow(record_pieces(
                     os.path.join(distinfo_dir, file)))
             record_writer.writerow([relpath(record_file, sys.prefix)])
-            del record_writer # causes the RECORD file to close
+            del record_writer  # causes the RECORD file to close
             record_reader = csv.reader(open(record_file, 'rb'))
             record_data = []
             for row in record_reader:
@@ -225,9 +236,6 @@
     def test_instantiation(self):
         # Test the Distribution class's instantiation provides us with usable
         # attributes.
-        # Import the Distribution class
-        from distutils2._backport.pkgutil import distinfo_dirname, Distribution
-
         here = os.path.abspath(os.path.dirname(__file__))
         name = 'choxie'
         version = '2.0.0.9'
@@ -236,7 +244,6 @@
         dist = Distribution(dist_path)
 
         self.assertEqual(dist.name, name)
-        from distutils2.metadata import DistributionMetadata
         self.assertTrue(isinstance(dist.metadata, DistributionMetadata))
         self.assertEqual(dist.metadata['version'], version)
         self.assertTrue(isinstance(dist.requested, type(bool())))
@@ -244,7 +251,6 @@
     def test_installed_files(self):
         # Test the iteration of installed files.
         # Test the distribution's installed files
-        from distutils2._backport.pkgutil import Distribution
         for distinfo_dir in self.distinfo_dirs:
             dist = Distribution(distinfo_dir)
             for path, md5_, size in dist.get_installed_files():
@@ -267,14 +273,12 @@
         false_path = relpath(os.path.join(*false_path), sys.prefix)
 
         # Test if the distribution uses the file in question
-        from distutils2._backport.pkgutil import Distribution
         dist = Distribution(distinfo_dir)
         self.assertTrue(dist.uses(true_path))
         self.assertFalse(dist.uses(false_path))
 
     def test_get_distinfo_file(self):
         # Test the retrieval of dist-info file objects.
-        from distutils2._backport.pkgutil import Distribution
         distinfo_name = 'choxie-2.0.0.9'
         other_distinfo_name = 'grammar-1.0a4'
         distinfo_dir = os.path.join(self.fake_dists_path,
@@ -295,7 +299,6 @@
             # Is it the correct file?
             self.assertEqual(value.name, os.path.join(distinfo_dir, distfile))
 
-        from distutils2.errors import DistutilsError
         # Test an absolute path that is part of another distributions dist-info
         other_distinfo_file = os.path.join(self.fake_dists_path,
             other_distinfo_name + '.dist-info', 'REQUESTED')
@@ -307,7 +310,6 @@
 
     def test_get_distinfo_files(self):
         # Test for the iteration of RECORD path entries.
-        from distutils2._backport.pkgutil import Distribution
         distinfo_name = 'towel_stuff-0.1'
         distinfo_dir = os.path.join(self.fake_dists_path,
             distinfo_name + '.dist-info')
@@ -345,7 +347,7 @@
         # Given a name and a version, we expect the distinfo_dirname function
         # to return a standard distribution information directory name.
 
-        items = [# (name, version, standard_dirname)
+        items = [  # (name, version, standard_dirname)
             # Test for a very simple single word name and decimal
             # version number
             ('docutils', '0.5', 'docutils-0.5.dist-info'),
@@ -356,9 +358,6 @@
             ('python-ldap', '2.5 a---5', 'python_ldap-2.5 a---5.dist-info'),
             ]
 
-        # Import the function in question
-        from distutils2._backport.pkgutil import distinfo_dirname
-
         # Loop through the items to validate the results
         for name, version, standard_dirname in items:
             dirname = distinfo_dirname(name, version)
@@ -371,11 +370,6 @@
             ('towel-stuff', '0.1')]
         found_dists = []
 
-        # Import the function in question
-        from distutils2._backport.pkgutil import get_distributions, \
-                                                 Distribution, \
-                                                 EggInfoDistribution
-
         # Verify the fake dists have been found.
         dists = [dist for dist in get_distributions()]
         for dist in dists:
@@ -416,12 +410,7 @@
     def test_get_distribution(self):
         # Test for looking up a distribution by name.
         # Test the lookup of the towel-stuff distribution
-        name = 'towel-stuff' # Note: This is different from the directory name
-
-        # Import the function in question
-        from distutils2._backport.pkgutil import get_distribution, \
-                                                 Distribution, \
-                                                 EggInfoDistribution
+        name = 'towel-stuff'  # Note: This is different from the directory name
 
         # Lookup the distribution
         dist = get_distribution(name)
@@ -461,7 +450,6 @@
 
     def test_get_file_users(self):
         # Test the iteration of distributions that use a file.
-        from distutils2._backport.pkgutil import get_file_users, Distribution
         name = 'towel_stuff-0.1'
         path = os.path.join(self.fake_dists_path, name,
             'towel_stuff', '__init__.py')
@@ -471,9 +459,6 @@
 
     def test_provides(self):
         # Test for looking up distributions by what they provide
-        from distutils2._backport.pkgutil import provides_distribution
-        from distutils2.errors import DistutilsError
-
         checkLists = lambda x, y: self.assertListEqual(sorted(x), sorted(y))
 
         l = [dist.name for dist in provides_distribution('truffles')]
@@ -522,12 +507,10 @@
                                                          use_egg_info=True)]
         checkLists(l, ['strawberry'])
 
-
         l = [dist.name for dist in provides_distribution('strawberry', '>0.6',
                                                          use_egg_info=True)]
         checkLists(l, [])
 
-
         l = [dist.name for dist in provides_distribution('banana', '0.4',
                                                          use_egg_info=True)]
         checkLists(l, ['banana'])
@@ -536,16 +519,12 @@
                                                          use_egg_info=True)]
         checkLists(l, ['banana'])
 
-
         l = [dist.name for dist in provides_distribution('banana', '!=0.4',
                                                          use_egg_info=True)]
         checkLists(l, [])
 
     def test_obsoletes(self):
         # Test looking for distributions based on what they obsolete
-        from distutils2._backport.pkgutil import obsoletes_distribution
-        from distutils2.errors import DistutilsError
-
         checkLists = lambda x, y: self.assertListEqual(sorted(x), sorted(y))
 
         l = [dist.name for dist in obsoletes_distribution('truffles', '1.0')]
@@ -555,7 +534,6 @@
                                                           use_egg_info=True)]
         checkLists(l, ['cheese', 'bacon'])
 
-
         l = [dist.name for dist in obsoletes_distribution('truffles', '0.8')]
         checkLists(l, ['choxie'])
 
@@ -575,7 +553,6 @@
 
     def test_yield_distribution(self):
         # tests the internal function _yield_distributions
-        from distutils2._backport.pkgutil import _yield_distributions
         checkLists = lambda x, y: self.assertListEqual(sorted(x), sorted(y))
 
         eggs = [('bacon', '0.1'), ('banana', '0.4'), ('strawberry', '0.6'),
diff --git a/distutils2/_backport/tests/test_shutil.py b/distutils2/_backport/tests/test_shutil.py
new file mode 100644
--- /dev/null
+++ b/distutils2/_backport/tests/test_shutil.py
@@ -0,0 +1,945 @@
+import os
+import sys
+import tempfile
+import stat
+import tarfile
+from os.path import splitdrive
+from StringIO import StringIO
+
+from distutils.spawn import find_executable, spawn
+from distutils2._backport import shutil
+from distutils2._backport.shutil import (
+    _make_tarball, _make_zipfile, make_archive, unpack_archive,
+    register_archive_format, unregister_archive_format, get_archive_formats,
+    register_unpack_format, unregister_unpack_format, get_unpack_formats,
+    Error, RegistryError)
+
+from distutils2.tests import unittest, support, TESTFN
+
+try:
+    import bz2
+    BZ2_SUPPORTED = True
+except ImportError:
+    BZ2_SUPPORTED = False
+
+TESTFN2 = TESTFN + "2"
+
+try:
+    import grp
+    import pwd
+    UID_GID_SUPPORT = True
+except ImportError:
+    UID_GID_SUPPORT = False
+
+try:
+    import zlib
+except ImportError:
+    zlib = None
+
+try:
+    import zipfile
+    ZIP_SUPPORT = True
+except ImportError:
+    ZIP_SUPPORT = find_executable('zip')
+
+class TestShutil(unittest.TestCase):
+
+    def setUp(self):
+        super(TestShutil, self).setUp()
+        self.tempdirs = []
+
+    def tearDown(self):
+        super(TestShutil, self).tearDown()
+        while self.tempdirs:
+            d = self.tempdirs.pop()
+            shutil.rmtree(d, os.name in ('nt', 'cygwin'))
+
+    def write_file(self, path, content='xxx'):
+        """Writes a file in the given path.
+
+
+        path can be a string or a sequence.
+        """
+        if isinstance(path, (list, tuple)):
+            path = os.path.join(*path)
+        f = open(path, 'w')
+        try:
+            f.write(content)
+        finally:
+            f.close()
+
+    def mkdtemp(self):
+        """Create a temporary directory that will be cleaned up.
+
+        Returns the path of the directory.
+        """
+        d = tempfile.mkdtemp()
+        self.tempdirs.append(d)
+        return d
+
+    def test_rmtree_errors(self):
+        # filename is guaranteed not to exist
+        filename = tempfile.mktemp()
+        self.assertRaises(OSError, shutil.rmtree, filename)
+
+    # See bug #1071513 for why we don't run this on cygwin
+    # and bug #1076467 for why we don't run this as root.
+    if (hasattr(os, 'chmod') and sys.platform[:6] != 'cygwin'
+        and not (hasattr(os, 'geteuid') and os.geteuid() == 0)):
+        def test_on_error(self):
+            self.errorState = 0
+            os.mkdir(TESTFN)
+            self.childpath = os.path.join(TESTFN, 'a')
+            f = open(self.childpath, 'w')
+            f.close()
+            old_dir_mode = os.stat(TESTFN).st_mode
+            old_child_mode = os.stat(self.childpath).st_mode
+            # Make unwritable.
+            os.chmod(self.childpath, stat.S_IREAD)
+            os.chmod(TESTFN, stat.S_IREAD)
+
+            shutil.rmtree(TESTFN, onerror=self.check_args_to_onerror)
+            # Test whether onerror has actually been called.
+            self.assertEqual(self.errorState, 2,
+                             "Expected call to onerror function did not happen.")
+
+            # Make writable again.
+            os.chmod(TESTFN, old_dir_mode)
+            os.chmod(self.childpath, old_child_mode)
+
+            # Clean up.
+            shutil.rmtree(TESTFN)
+
+    def check_args_to_onerror(self, func, arg, exc):
+        # test_rmtree_errors deliberately runs rmtree
+        # on a directory that is chmod 400, which will fail.
+        # This function is run when shutil.rmtree fails.
+        # 99.9% of the time it initially fails to remove
+        # a file in the directory, so the first time through
+        # func is os.remove.
+        # However, some Linux machines running ZFS on
+        # FUSE experienced a failure earlier in the process
+        # at os.listdir.  The first failure may legally
+        # be either.
+        if self.errorState == 0:
+            if func is os.remove:
+                self.assertEqual(arg, self.childpath)
+            else:
+                self.assertIs(func, os.listdir,
+                              "func must be either os.remove or os.listdir")
+                self.assertEqual(arg, TESTFN)
+            self.assertTrue(issubclass(exc[0], OSError))
+            self.errorState = 1
+        else:
+            self.assertEqual(func, os.rmdir)
+            self.assertEqual(arg, TESTFN)
+            self.assertTrue(issubclass(exc[0], OSError))
+            self.errorState = 2
+
+    def test_rmtree_dont_delete_file(self):
+        # When called on a file instead of a directory, don't delete it.
+        handle, path = tempfile.mkstemp()
+        os.fdopen(handle).close()
+        self.assertRaises(OSError, shutil.rmtree, path)
+        os.remove(path)
+
+    def _write_data(self, path, data):
+        f = open(path, "w")
+        f.write(data)
+        f.close()
+
+    def test_copytree_simple(self):
+
+        def read_data(path):
+            f = open(path)
+            data = f.read()
+            f.close()
+            return data
+
+        src_dir = tempfile.mkdtemp()
+        dst_dir = os.path.join(tempfile.mkdtemp(), 'destination')
+        self._write_data(os.path.join(src_dir, 'test.txt'), '123')
+        os.mkdir(os.path.join(src_dir, 'test_dir'))
+        self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
+
+        try:
+            shutil.copytree(src_dir, dst_dir)
+            self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt')))
+            self.assertTrue(os.path.isdir(os.path.join(dst_dir, 'test_dir')))
+            self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test_dir',
+                                                        'test.txt')))
+            actual = read_data(os.path.join(dst_dir, 'test.txt'))
+            self.assertEqual(actual, '123')
+            actual = read_data(os.path.join(dst_dir, 'test_dir', 'test.txt'))
+            self.assertEqual(actual, '456')
+        finally:
+            for path in (
+                    os.path.join(src_dir, 'test.txt'),
+                    os.path.join(dst_dir, 'test.txt'),
+                    os.path.join(src_dir, 'test_dir', 'test.txt'),
+                    os.path.join(dst_dir, 'test_dir', 'test.txt'),
+                ):
+                if os.path.exists(path):
+                    os.remove(path)
+            for path in (src_dir,
+                    os.path.dirname(dst_dir)
+                ):
+                if os.path.exists(path):
+                    shutil.rmtree(path)
+
+    def test_copytree_with_exclude(self):
+
+        def read_data(path):
+            f = open(path)
+            data = f.read()
+            f.close()
+            return data
+
+        # creating data
+        join = os.path.join
+        exists = os.path.exists
+        src_dir = tempfile.mkdtemp()
+        try:
+            dst_dir = join(tempfile.mkdtemp(), 'destination')
+            self._write_data(join(src_dir, 'test.txt'), '123')
+            self._write_data(join(src_dir, 'test.tmp'), '123')
+            os.mkdir(join(src_dir, 'test_dir'))
+            self._write_data(join(src_dir, 'test_dir', 'test.txt'), '456')
+            os.mkdir(join(src_dir, 'test_dir2'))
+            self._write_data(join(src_dir, 'test_dir2', 'test.txt'), '456')
+            os.mkdir(join(src_dir, 'test_dir2', 'subdir'))
+            os.mkdir(join(src_dir, 'test_dir2', 'subdir2'))
+            self._write_data(join(src_dir, 'test_dir2', 'subdir', 'test.txt'),
+                             '456')
+            self._write_data(join(src_dir, 'test_dir2', 'subdir2', 'test.py'),
+                             '456')
+
+
+            # testing glob-like patterns
+            try:
+                patterns = shutil.ignore_patterns('*.tmp', 'test_dir2')
+                shutil.copytree(src_dir, dst_dir, ignore=patterns)
+                # checking the result: some elements should not be copied
+                self.assertTrue(exists(join(dst_dir, 'test.txt')))
+                self.assertTrue(not exists(join(dst_dir, 'test.tmp')))
+                self.assertTrue(not exists(join(dst_dir, 'test_dir2')))
+            finally:
+                if os.path.exists(dst_dir):
+                    shutil.rmtree(dst_dir)
+            try:
+                patterns = shutil.ignore_patterns('*.tmp', 'subdir*')
+                shutil.copytree(src_dir, dst_dir, ignore=patterns)
+                # checking the result: some elements should not be copied
+                self.assertTrue(not exists(join(dst_dir, 'test.tmp')))
+                self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2')))
+                self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir')))
+            finally:
+                if os.path.exists(dst_dir):
+                    shutil.rmtree(dst_dir)
+
+            # testing callable-style
+            try:
+                def _filter(src, names):
+                    res = []
+                    for name in names:
+                        path = os.path.join(src, name)
+
+                        if (os.path.isdir(path) and
+                            path.split()[-1] == 'subdir'):
+                            res.append(name)
+                        elif os.path.splitext(path)[-1] in ('.py',):
+                            res.append(name)
+                    return res
+
+                shutil.copytree(src_dir, dst_dir, ignore=_filter)
+
+                # checking the result: some elements should not be copied
+                self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2',
+                                        'test.py')))
+                self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir')))
+
+            finally:
+                if os.path.exists(dst_dir):
+                    shutil.rmtree(dst_dir)
+        finally:
+            shutil.rmtree(src_dir)
+            shutil.rmtree(os.path.dirname(dst_dir))
+
+    @support.skip_unless_symlink
+    def test_dont_copy_file_onto_link_to_itself(self):
+        # bug 851123.
+        os.mkdir(TESTFN)
+        src = os.path.join(TESTFN, 'cheese')
+        dst = os.path.join(TESTFN, 'shop')
+        try:
+            f = open(src, 'w')
+            f.write('cheddar')
+            f.close()
+
+            if hasattr(os, "link"):
+                os.link(src, dst)
+                self.assertRaises(shutil.Error, shutil.copyfile, src, dst)
+                f = open(src, 'r')
+                try:
+                    self.assertEqual(f.read(), 'cheddar')
+                finally:
+                    f.close()
+                os.remove(dst)
+
+            # Using `src` here would mean we end up with a symlink pointing
+            # to TESTFN/TESTFN/cheese, while it should point at
+            # TESTFN/cheese.
+            os.symlink('cheese', dst)
+            self.assertRaises(shutil.Error, shutil.copyfile, src, dst)
+            f = open(src, 'r')
+            try:
+                self.assertEqual(f.read(), 'cheddar')
+            finally:
+                f.close()
+            os.remove(dst)
+        finally:
+            try:
+                shutil.rmtree(TESTFN)
+            except OSError:
+                pass
+
+    @support.skip_unless_symlink
+    def test_rmtree_on_symlink(self):
+        # bug 1669.
+        os.mkdir(TESTFN)
+        try:
+            src = os.path.join(TESTFN, 'cheese')
+            dst = os.path.join(TESTFN, 'shop')
+            os.mkdir(src)
+            os.symlink(src, dst)
+            self.assertRaises(OSError, shutil.rmtree, dst)
+        finally:
+            shutil.rmtree(TESTFN, ignore_errors=True)
+
+    if hasattr(os, "mkfifo"):
+        # Issue #3002: copyfile and copytree block indefinitely on named pipes
+        def test_copyfile_named_pipe(self):
+            os.mkfifo(TESTFN)
+            try:
+                self.assertRaises(shutil.SpecialFileError,
+                                  shutil.copyfile, TESTFN, TESTFN2)
+                self.assertRaises(shutil.SpecialFileError,
+                                  shutil.copyfile, __file__, TESTFN)
+            finally:
+                os.remove(TESTFN)
+
+    @unittest.skipUnless(hasattr(os, 'mkfifo'), 'requires os.mkfifo')
+    def test_copytree_named_pipe(self):
+        os.mkdir(TESTFN)
+        try:
+            subdir = os.path.join(TESTFN, "subdir")
+            os.mkdir(subdir)
+            pipe = os.path.join(subdir, "mypipe")
+            os.mkfifo(pipe)
+            try:
+                shutil.copytree(TESTFN, TESTFN2)
+            except shutil.Error, e:
+                errors = e.args[0]
+                self.assertEqual(len(errors), 1)
+                src, dst, error_msg = errors[0]
+                self.assertEqual("`%s` is a named pipe" % pipe, error_msg)
+            else:
+                self.fail("shutil.Error should have been raised")
+        finally:
+            shutil.rmtree(TESTFN, ignore_errors=True)
+            shutil.rmtree(TESTFN2, ignore_errors=True)
+
+    def test_copytree_special_func(self):
+
+        src_dir = self.mkdtemp()
+        dst_dir = os.path.join(self.mkdtemp(), 'destination')
+        self._write_data(os.path.join(src_dir, 'test.txt'), '123')
+        os.mkdir(os.path.join(src_dir, 'test_dir'))
+        self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
+
+        copied = []
+        def _copy(src, dst):
+            copied.append((src, dst))
+
+        shutil.copytree(src_dir, dst_dir, copy_function=_copy)
+        self.assertEquals(len(copied), 2)
+
+    @support.skip_unless_symlink
+    def test_copytree_dangling_symlinks(self):
+
+        # a dangling symlink raises an error at the end
+        src_dir = self.mkdtemp()
+        dst_dir = os.path.join(self.mkdtemp(), 'destination')
+        os.symlink('IDONTEXIST', os.path.join(src_dir, 'test.txt'))
+        os.mkdir(os.path.join(src_dir, 'test_dir'))
+        self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
+        self.assertRaises(Error, shutil.copytree, src_dir, dst_dir)
+
+        # a dangling symlink is ignored with the proper flag
+        dst_dir = os.path.join(self.mkdtemp(), 'destination2')
+        shutil.copytree(src_dir, dst_dir, ignore_dangling_symlinks=True)
+        self.assertNotIn('test.txt', os.listdir(dst_dir))
+
+        # a dangling symlink is copied if symlinks=True
+        dst_dir = os.path.join(self.mkdtemp(), 'destination3')
+        shutil.copytree(src_dir, dst_dir, symlinks=True)
+        self.assertIn('test.txt', os.listdir(dst_dir))
+
+    @unittest.skipUnless(zlib, "requires zlib")
+    def test_make_tarball(self):
+        # creating something to tar
+        tmpdir = self.mkdtemp()
+        self.write_file([tmpdir, 'file1'], 'xxx')
+        self.write_file([tmpdir, 'file2'], 'xxx')
+        os.mkdir(os.path.join(tmpdir, 'sub'))
+        self.write_file([tmpdir, 'sub', 'file3'], 'xxx')
+
+        tmpdir2 = self.mkdtemp()
+        if splitdrive(tmpdir)[0] != splitdrive(tmpdir2)[0]:
+            self.skipTest("source and target should be on same drive")
+
+        base_name = os.path.join(tmpdir2, 'archive')
+
+        # working with relative paths to avoid tar warnings
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            _make_tarball(splitdrive(base_name)[1], '.')
+        finally:
+            os.chdir(old_dir)
+
+        # check if the compressed tarball was created
+        tarball = base_name + '.tar.gz'
+        self.assertTrue(os.path.exists(tarball))
+
+        # trying an uncompressed one
+        base_name = os.path.join(tmpdir2, 'archive')
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            _make_tarball(splitdrive(base_name)[1], '.', compress=None)
+        finally:
+            os.chdir(old_dir)
+        tarball = base_name + '.tar'
+        self.assertTrue(os.path.exists(tarball))
+
+    def _tarinfo(self, path):
+        tar = tarfile.open(path)
+        try:
+            names = tar.getnames()
+            names.sort()
+            return tuple(names)
+        finally:
+            tar.close()
+
+    def _create_files(self):
+        # creating something to tar
+        tmpdir = self.mkdtemp()
+        dist = os.path.join(tmpdir, 'dist')
+        os.mkdir(dist)
+        self.write_file([dist, 'file1'], 'xxx')
+        self.write_file([dist, 'file2'], 'xxx')
+        os.mkdir(os.path.join(dist, 'sub'))
+        self.write_file([dist, 'sub', 'file3'], 'xxx')
+        os.mkdir(os.path.join(dist, 'sub2'))
+        tmpdir2 = self.mkdtemp()
+        base_name = os.path.join(tmpdir2, 'archive')
+        return tmpdir, tmpdir2, base_name
+
+    @unittest.skipUnless(zlib, "Requires zlib")
+    @unittest.skipUnless(find_executable('tar') and find_executable('gzip'),
+                         'Need the tar command to run')
+    def test_tarfile_vs_tar(self):
+        tmpdir, tmpdir2, base_name =  self._create_files()
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            _make_tarball(base_name, 'dist')
+        finally:
+            os.chdir(old_dir)
+
+        # check if the compressed tarball was created
+        tarball = base_name + '.tar.gz'
+        self.assertTrue(os.path.exists(tarball))
+
+        # now create another tarball using `tar`
+        tarball2 = os.path.join(tmpdir, 'archive2.tar.gz')
+        tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist']
+        gzip_cmd = ['gzip', '-f9', 'archive2.tar']
+        old_dir = os.getcwd()
+        old_stdout = sys.stdout
+        os.chdir(tmpdir)
+        sys.stdout = StringIO()
+
+        try:
+            spawn(tar_cmd)
+            spawn(gzip_cmd)
+        finally:
+            os.chdir(old_dir)
+            sys.stdout = old_stdout
+
+        self.assertTrue(os.path.exists(tarball2))
+        # let's compare both tarballs
+        self.assertEquals(self._tarinfo(tarball), self._tarinfo(tarball2))
+
+        # trying an uncompressed one
+        base_name = os.path.join(tmpdir2, 'archive')
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            _make_tarball(base_name, 'dist', compress=None)
+        finally:
+            os.chdir(old_dir)
+        tarball = base_name + '.tar'
+        self.assertTrue(os.path.exists(tarball))
+
+        # now for a dry_run
+        base_name = os.path.join(tmpdir2, 'archive')
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            _make_tarball(base_name, 'dist', compress=None, dry_run=True)
+        finally:
+            os.chdir(old_dir)
+        tarball = base_name + '.tar'
+        self.assertTrue(os.path.exists(tarball))
+
+    @unittest.skipUnless(zlib, "Requires zlib")
+    @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run')
+    def test_make_zipfile(self):
+        # creating something to tar
+        tmpdir = self.mkdtemp()
+        self.write_file([tmpdir, 'file1'], 'xxx')
+        self.write_file([tmpdir, 'file2'], 'xxx')
+
+        tmpdir2 = self.mkdtemp()
+        base_name = os.path.join(tmpdir2, 'archive')
+        _make_zipfile(base_name, tmpdir)
+
+        # check if the compressed tarball was created
+        tarball = base_name + '.zip'
+        self.assertTrue(os.path.exists(tarball))
+
+
+    def test_make_archive(self):
+        tmpdir = self.mkdtemp()
+        base_name = os.path.join(tmpdir, 'archive')
+        self.assertRaises(ValueError, make_archive, base_name, 'xxx')
+
+    @unittest.skipUnless(zlib, "Requires zlib")
+    def test_make_archive_owner_group(self):
+        # testing make_archive with owner and group, with various combinations
+        # this works even if there's not gid/uid support
+        if UID_GID_SUPPORT:
+            group = grp.getgrgid(0)[0]
+            owner = pwd.getpwuid(0)[0]
+        else:
+            group = owner = 'root'
+
+        base_dir, root_dir, base_name =  self._create_files()
+        base_name = os.path.join(self.mkdtemp() , 'archive')
+        res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner,
+                           group=group)
+        self.assertTrue(os.path.exists(res))
+
+        res = make_archive(base_name, 'zip', root_dir, base_dir)
+        self.assertTrue(os.path.exists(res))
+
+        res = make_archive(base_name, 'tar', root_dir, base_dir,
+                           owner=owner, group=group)
+        self.assertTrue(os.path.exists(res))
+
+        res = make_archive(base_name, 'tar', root_dir, base_dir,
+                           owner='kjhkjhkjg', group='oihohoh')
+        self.assertTrue(os.path.exists(res))
+
+
+    @unittest.skipUnless(zlib, "Requires zlib")
+    @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support")
+    def test_tarfile_root_owner(self):
+        tmpdir, tmpdir2, base_name =  self._create_files()
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        group = grp.getgrgid(0)[0]
+        owner = pwd.getpwuid(0)[0]
+        try:
+            archive_name = _make_tarball(base_name, 'dist', compress=None,
+                                         owner=owner, group=group)
+        finally:
+            os.chdir(old_dir)
+
+        # check if the compressed tarball was created
+        self.assertTrue(os.path.exists(archive_name))
+
+        # now checks the rights
+        archive = tarfile.open(archive_name)
+        try:
+            for member in archive.getmembers():
+                self.assertEquals(member.uid, 0)
+                self.assertEquals(member.gid, 0)
+        finally:
+            archive.close()
+
+    def test_make_archive_cwd(self):
+        current_dir = os.getcwd()
+        def _breaks(*args, **kw):
+            raise RuntimeError()
+
+        register_archive_format('xxx', _breaks, [], 'xxx file')
+        try:
+            try:
+                make_archive('xxx', 'xxx', root_dir=self.mkdtemp())
+            except Exception:
+                pass
+            self.assertEquals(os.getcwd(), current_dir)
+        finally:
+            unregister_archive_format('xxx')
+
+    def test_register_archive_format(self):
+
+        self.assertRaises(TypeError, register_archive_format, 'xxx', 1)
+        self.assertRaises(TypeError, register_archive_format, 'xxx', lambda: x,
+                          1)
+        self.assertRaises(TypeError, register_archive_format, 'xxx', lambda: x,
+                          [(1, 2), (1, 2, 3)])
+
+        register_archive_format('xxx', lambda: x, [(1, 2)], 'xxx file')
+        formats = [name for name, params in get_archive_formats()]
+        self.assertIn('xxx', formats)
+
+        unregister_archive_format('xxx')
+        formats = [name for name, params in get_archive_formats()]
+        self.assertNotIn('xxx', formats)
+
+    def _compare_dirs(self, dir1, dir2):
+        # check that dir1 and dir2 are equivalent,
+        # return the diff
+        diff = []
+        for root, dirs, files in os.walk(dir1):
+            for file_ in files:
+                path = os.path.join(root, file_)
+                target_path = os.path.join(dir2, os.path.split(path)[-1])
+                if not os.path.exists(target_path):
+                    diff.append(file_)
+        return diff
+
+    @unittest.skipUnless(zlib, "Requires zlib")
+    def test_unpack_archive(self):
+        formats = ['tar', 'gztar', 'zip']
+        if BZ2_SUPPORTED:
+            formats.append('bztar')
+
+        for format in formats:
+            tmpdir = self.mkdtemp()
+            base_dir, root_dir, base_name =  self._create_files()
+            tmpdir2 = self.mkdtemp()
+            filename = make_archive(base_name, format, root_dir, base_dir)
+
+            # let's try to unpack it now
+            unpack_archive(filename, tmpdir2)
+            diff = self._compare_dirs(tmpdir, tmpdir2)
+            self.assertEquals(diff, [])
+
+    def test_unpack_registery(self):
+
+        formats = get_unpack_formats()
+
+        def _boo(filename, extract_dir, extra):
+            self.assertEquals(extra, 1)
+            self.assertEquals(filename, 'stuff.boo')
+            self.assertEquals(extract_dir, 'xx')
+
+        register_unpack_format('Boo', ['.boo', '.b2'], _boo, [('extra', 1)])
+        unpack_archive('stuff.boo', 'xx')
+
+        # trying to register a .boo unpacker again
+        self.assertRaises(RegistryError, register_unpack_format, 'Boo2',
+                          ['.boo'], _boo)
+
+        # should work now
+        unregister_unpack_format('Boo')
+        register_unpack_format('Boo2', ['.boo'], _boo)
+        self.assertIn(('Boo2', ['.boo'], ''), get_unpack_formats())
+        self.assertNotIn(('Boo', ['.boo'], ''), get_unpack_formats())
+
+        # let's leave a clean state
+        unregister_unpack_format('Boo2')
+        self.assertEquals(get_unpack_formats(), formats)
+
+
+class TestMove(unittest.TestCase):
+
+    def setUp(self):
+        filename = "foo"
+        self.src_dir = tempfile.mkdtemp()
+        self.dst_dir = tempfile.mkdtemp()
+        self.src_file = os.path.join(self.src_dir, filename)
+        self.dst_file = os.path.join(self.dst_dir, filename)
+        # Try to create a dir in the current directory, hoping that it is
+        # not located on the same filesystem as the system tmp dir.
+        try:
+            self.dir_other_fs = tempfile.mkdtemp(
+                dir=os.path.dirname(__file__))
+            self.file_other_fs = os.path.join(self.dir_other_fs,
+                filename)
+        except OSError:
+            self.dir_other_fs = None
+        f = open(self.src_file, "wb")
+        try:
+            f.write("spam")
+        finally:
+            f.close()
+
+    def tearDown(self):
+        for d in (self.src_dir, self.dst_dir, self.dir_other_fs):
+            try:
+                if d:
+                    shutil.rmtree(d)
+            except:
+                pass
+
+    def _check_move_file(self, src, dst, real_dst):
+        f = open(src, "rb")
+        try:
+            contents = f.read()
+        finally:
+            f.close()
+
+        shutil.move(src, dst)
+        f = open(real_dst, "rb")
+        try:
+            self.assertEqual(contents, f.read())
+        finally:
+            f.close()
+
+        self.assertFalse(os.path.exists(src))
+
+    def _check_move_dir(self, src, dst, real_dst):
+        contents = sorted(os.listdir(src))
+        shutil.move(src, dst)
+        self.assertEqual(contents, sorted(os.listdir(real_dst)))
+        self.assertFalse(os.path.exists(src))
+
+    def test_move_file(self):
+        # Move a file to another location on the same filesystem.
+        self._check_move_file(self.src_file, self.dst_file, self.dst_file)
+
+    def test_move_file_to_dir(self):
+        # Move a file inside an existing dir on the same filesystem.
+        self._check_move_file(self.src_file, self.dst_dir, self.dst_file)
+
+    def test_move_file_other_fs(self):
+        # Move a file to an existing dir on another filesystem.
+        if not self.dir_other_fs:
+            # skip
+            return
+        self._check_move_file(self.src_file, self.file_other_fs,
+            self.file_other_fs)
+
+    def test_move_file_to_dir_other_fs(self):
+        # Move a file to another location on another filesystem.
+        if not self.dir_other_fs:
+            # skip
+            return
+        self._check_move_file(self.src_file, self.dir_other_fs,
+            self.file_other_fs)
+
+    def test_move_dir(self):
+        # Move a dir to another location on the same filesystem.
+        dst_dir = tempfile.mktemp()
+        try:
+            self._check_move_dir(self.src_dir, dst_dir, dst_dir)
+        finally:
+            try:
+                shutil.rmtree(dst_dir)
+            except:
+                pass
+
+    def test_move_dir_other_fs(self):
+        # Move a dir to another location on another filesystem.
+        if not self.dir_other_fs:
+            # skip
+            return
+        dst_dir = tempfile.mktemp(dir=self.dir_other_fs)
+        try:
+            self._check_move_dir(self.src_dir, dst_dir, dst_dir)
+        finally:
+            try:
+                shutil.rmtree(dst_dir)
+            except:
+                pass
+
+    def test_move_dir_to_dir(self):
+        # Move a dir inside an existing dir on the same filesystem.
+        self._check_move_dir(self.src_dir, self.dst_dir,
+            os.path.join(self.dst_dir, os.path.basename(self.src_dir)))
+
+    def test_move_dir_to_dir_other_fs(self):
+        # Move a dir inside an existing dir on another filesystem.
+        if not self.dir_other_fs:
+            # skip
+            return
+        self._check_move_dir(self.src_dir, self.dir_other_fs,
+            os.path.join(self.dir_other_fs, os.path.basename(self.src_dir)))
+
+    def test_existing_file_inside_dest_dir(self):
+        # A file with the same name inside the destination dir already exists.
+        f = open(self.dst_file, "wb")
+        try:
+            pass
+        finally:
+            f.close()
+        self.assertRaises(shutil.Error, shutil.move, self.src_file, self.dst_dir)
+
+    def test_dont_move_dir_in_itself(self):
+        # Moving a dir inside itself raises an Error.
+        dst = os.path.join(self.src_dir, "bar")
+        self.assertRaises(shutil.Error, shutil.move, self.src_dir, dst)
+
+    def test_destinsrc_false_negative(self):
+        os.mkdir(TESTFN)
+        try:
+            for src, dst in [('srcdir', 'srcdir/dest')]:
+                src = os.path.join(TESTFN, src)
+                dst = os.path.join(TESTFN, dst)
+                self.assertTrue(shutil._destinsrc(src, dst),
+                             msg='_destinsrc() wrongly concluded that '
+                             'dst (%s) is not in src (%s)' % (dst, src))
+        finally:
+            shutil.rmtree(TESTFN, ignore_errors=True)
+
+    def test_destinsrc_false_positive(self):
+        os.mkdir(TESTFN)
+        try:
+            for src, dst in [('srcdir', 'src/dest'), ('srcdir', 'srcdir.new')]:
+                src = os.path.join(TESTFN, src)
+                dst = os.path.join(TESTFN, dst)
+                self.assertFalse(shutil._destinsrc(src, dst),
+                            msg='_destinsrc() wrongly concluded that '
+                            'dst (%s) is in src (%s)' % (dst, src))
+        finally:
+            shutil.rmtree(TESTFN, ignore_errors=True)
+
+
+class TestCopyFile(unittest.TestCase):
+
+    _delete = False
+
+    class Faux(object):
+        _entered = False
+        _exited_with = None
+        _raised = False
+
+        def __init__(self, raise_in_exit=False, suppress_at_exit=True):
+            self._raise_in_exit = raise_in_exit
+            self._suppress_at_exit = suppress_at_exit
+
+        def read(self, *args):
+            return ''
+
+        def __enter__(self):
+            self._entered = True
+
+        def __exit__(self, exc_type, exc_val, exc_tb):
+            self._exited_with = exc_type, exc_val, exc_tb
+            if self._raise_in_exit:
+                self._raised = True
+                raise IOError("Cannot close")
+            return self._suppress_at_exit
+
+    def tearDown(self):
+        if self._delete:
+            del shutil.open
+
+    def _set_shutil_open(self, func):
+        shutil.open = func
+        self._delete = True
+
+    def test_w_source_open_fails(self):
+        def _open(filename, mode='r'):
+            if filename == 'srcfile':
+                raise IOError('Cannot open "srcfile"')
+            assert 0  # shouldn't reach here.
+
+        self._set_shutil_open(_open)
+
+        self.assertRaises(IOError, shutil.copyfile, 'srcfile', 'destfile')
+
+    @unittest.skip("can't use the with statement and support 2.4")
+    def test_w_dest_open_fails(self):
+
+        srcfile = self.Faux()
+
+        def _open(filename, mode='r'):
+            if filename == 'srcfile':
+                return srcfile
+            if filename == 'destfile':
+                raise IOError('Cannot open "destfile"')
+            assert 0  # shouldn't reach here.
+
+        self._set_shutil_open(_open)
+
+        shutil.copyfile('srcfile', 'destfile')
+        self.assertTrue(srcfile._entered)
+        self.assertTrue(srcfile._exited_with[0] is IOError)
+        self.assertEqual(srcfile._exited_with[1].args,
+                         ('Cannot open "destfile"',))
+
+    @unittest.skip("can't use the with statement and support 2.4")
+    def test_w_dest_close_fails(self):
+
+        srcfile = self.Faux()
+        destfile = self.Faux(True)
+
+        def _open(filename, mode='r'):
+            if filename == 'srcfile':
+                return srcfile
+            if filename == 'destfile':
+                return destfile
+            assert 0  # shouldn't reach here.
+
+        self._set_shutil_open(_open)
+
+        shutil.copyfile('srcfile', 'destfile')
+        self.assertTrue(srcfile._entered)
+        self.assertTrue(destfile._entered)
+        self.assertTrue(destfile._raised)
+        self.assertTrue(srcfile._exited_with[0] is IOError)
+        self.assertEqual(srcfile._exited_with[1].args,
+                         ('Cannot close',))
+
+    @unittest.skip("can't use the with statement and support 2.4")
+    def test_w_source_close_fails(self):
+
+        srcfile = self.Faux(True)
+        destfile = self.Faux()
+
+        def _open(filename, mode='r'):
+            if filename == 'srcfile':
+                return srcfile
+            if filename == 'destfile':
+                return destfile
+            assert 0  # shouldn't reach here.
+
+        self._set_shutil_open(_open)
+
+        self.assertRaises(IOError,
+                          shutil.copyfile, 'srcfile', 'destfile')
+        self.assertTrue(srcfile._entered)
+        self.assertTrue(destfile._entered)
+        self.assertFalse(destfile._raised)
+        self.assertTrue(srcfile._exited_with[0] is None)
+        self.assertTrue(srcfile._raised)
+
+
+def test_suite():
+    suite = unittest.TestSuite()
+    load = unittest.defaultTestLoader.loadTestsFromTestCase
+    suite.addTest(load(TestCopyFile))
+    suite.addTest(load(TestMove))
+    suite.addTest(load(TestShutil))
+    return suite
+
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/distutils2/_backport/tests/test_sysconfig.py b/distutils2/_backport/tests/test_sysconfig.py
--- a/distutils2/_backport/tests/test_sysconfig.py
+++ b/distutils2/_backport/tests/test_sysconfig.py
@@ -4,7 +4,7 @@
 import sys
 import subprocess
 import shutil
-from copy import copy, deepcopy
+from copy import copy
 from ConfigParser import RawConfigParser
 from StringIO import StringIO
 
@@ -15,13 +15,9 @@
         get_scheme_names, _main, _SCHEMES)
 
 from distutils2.tests import unittest
-from distutils2.tests.support import EnvironGuard
+from distutils2.tests.support import EnvironGuard, skip_unless_symlink
 from test.test_support import TESTFN, unlink
-try:
-    from test.test_support import skip_unless_symlink
-except ImportError:
-    skip_unless_symlink = unittest.skip(
-        'requires test.test_support.skip_unless_symlink')
+
 
 class TestSysConfig(EnvironGuard, unittest.TestCase):
 
diff --git a/distutils2/command/build_py.py b/distutils2/command/build_py.py
--- a/distutils2/command/build_py.py
+++ b/distutils2/command/build_py.py
@@ -8,7 +8,6 @@
 import logging
 from glob import glob
 
-import distutils2
 from distutils2.command.cmd import Command
 from distutils2.errors import DistutilsOptionError, DistutilsFileError
 from distutils2.util import convert_path
@@ -66,10 +65,9 @@
         self.packages = self.distribution.packages
         self.py_modules = self.distribution.py_modules
         self.package_data = self.distribution.package_data
-        self.package_dir = {}
-        if self.distribution.package_dir:
-            for name, path in self.distribution.package_dir.iteritems():
-                self.package_dir[name] = convert_path(path)
+        self.package_dir = None
+        if self.distribution.package_dir is not None:
+            self.package_dir = convert_path(self.distribution.package_dir)
         self.data_files = self.get_data_files()
 
         # Ick, copied straight from install_lib.py (fancy_getopt needs a
@@ -164,11 +162,13 @@
 
         Helper function for `run()`.
         """
+        # FIXME add tests for this method
         for package, src_dir, build_dir, filenames in self.data_files:
             for filename in filenames:
                 target = os.path.join(build_dir, filename)
+                srcfile = os.path.join(src_dir, filename)
                 self.mkpath(os.path.dirname(target))
-                outf, copied = self.copy_file(os.path.join(src_dir, filename),
+                outf, copied = self.copy_file(srcfile,
                                target, preserve_mode=False)
                 if copied and srcfile in self.distribution.convert_2to3.doctests:
                     self._doctests_2to3.append(outf)
@@ -179,41 +179,14 @@
         """Return the directory, relative to the top of the source
            distribution, where package 'package' should be found
            (at least according to the 'package_dir' option, if any)."""
+        path = package.split('.')
+        if self.package_dir is not None:
+            path.insert(0, self.package_dir)
 
-        path = package.split('.')
+        if len(path) > 0:
+            return os.path.join(*path)
 
-        if not self.package_dir:
-            if path:
-                return os.path.join(*path)
-            else:
-                return ''
-        else:
-            tail = []
-            while path:
-                try:
-                    pdir = self.package_dir['.'.join(path)]
-                except KeyError:
-                    tail.insert(0, path[-1])
-                    del path[-1]
-                else:
-                    tail.insert(0, pdir)
-                    return os.path.join(*tail)
-            else:
-                # Oops, got all the way through 'path' without finding a
-                # match in package_dir.  If package_dir defines a directory
-                # for the root (nameless) package, then fallback on it;
-                # otherwise, we might as well have not consulted
-                # package_dir at all, as we just use the directory implied
-                # by 'tail' (which should be the same as the original value
-                # of 'path' at this point).
-                pdir = self.package_dir.get('')
-                if pdir is not None:
-                    tail.insert(0, pdir)
-
-                if tail:
-                    return os.path.join(*tail)
-                else:
-                    return ''
+        return ''
 
     def check_package(self, package, package_dir):
         """Helper function for `find_package_modules()` and `find_modules()'.
diff --git a/distutils2/command/cmd.py b/distutils2/command/cmd.py
--- a/distutils2/command/cmd.py
+++ b/distutils2/command/cmd.py
@@ -165,7 +165,10 @@
             header = "command options for '%s':" % self.get_command_name()
         self.announce(indent + header, level=logging.INFO)
         indent = indent + "  "
+        negative_opt = getattr(self, 'negative_opt', ())
         for (option, _, _) in self.user_options:
+            if option in negative_opt:
+                continue
             option = option.replace('-', '_')
             if option[-1] == "=":
                 option = option[:-1]
diff --git a/distutils2/command/sdist.py b/distutils2/command/sdist.py
--- a/distutils2/command/sdist.py
+++ b/distutils2/command/sdist.py
@@ -18,7 +18,8 @@
 from distutils2.command import get_command_names
 from distutils2.command.cmd import Command
 from distutils2.errors import (DistutilsPlatformError, DistutilsOptionError,
-                               DistutilsTemplateError, DistutilsModuleError)
+                               DistutilsTemplateError, DistutilsModuleError,
+                               DistutilsFileError)
 from distutils2.manifest import Manifest
 from distutils2 import logger
 from distutils2.util import convert_path, resolve_name
@@ -291,6 +292,12 @@
             logger.warn("no files to distribute -- empty manifest?")
         else:
             logger.info(msg)
+
+        for file in self.distribution.metadata.requires_files:
+            if file not in files:
+                msg = "'%s' must be included explicitly in 'extra_files'" % file
+                raise DistutilsFileError(msg)
+
         for file in files:
             if not os.path.isfile(file):
                 logger.warn("'%s' not a regular file -- skipping" % file)
diff --git a/distutils2/config.py b/distutils2/config.py
--- a/distutils2/config.py
+++ b/distutils2/config.py
@@ -7,7 +7,8 @@
 from ConfigParser import RawConfigParser
 
 from distutils2 import logger
-from distutils2.util import check_environ, resolve_name
+from distutils2.errors import DistutilsOptionError
+from distutils2.util import check_environ, resolve_name, strtobool
 from distutils2.compiler import set_compiler
 from distutils2.command import set_command
 
@@ -76,10 +77,9 @@
         return value
 
     def _multiline(self, value):
-        if '\n' in value:
-            value = [v for v in
-                        [v.strip() for v in value.split('\n')]
-                        if v != '']
+        value = [v for v in
+                [v.strip() for v in value.split('\n')]
+                if v != '']
         return value
 
     def _read_setup_cfg(self, parser):
@@ -100,7 +100,9 @@
         if 'metadata' in content:
             for key, value in content['metadata'].iteritems():
                 key = key.replace('_', '-')
-                value = self._multiline(value)
+                if metadata.is_multi_field(key):
+                    value = self._multiline(value)
+
                 if key == 'project-url':
                     value = [(label.strip(), url.strip())
                              for label, url in
@@ -112,30 +114,45 @@
                                "mutually exclusive")
                         raise DistutilsOptionError(msg)
 
-                    f = open(value)    # will raise if file not found
-                    try:
-                        value = f.read()
-                    finally:
-                        f.close()
+                    if isinstance(value, list):
+                        filenames = value
+                    else:
+                        filenames = value.split()
+
+                    # concatenate the contents of each file
+                    value = ''
+                    for filename in filenames:
+                        f = open(filename)    # will raise if file not found
+                        try:
+                            value += f.read().strip() + '\n'
+                        finally:
+                            f.close()
+                        # add filename as a required file
+                        if filename not in metadata.requires_files:
+                            metadata.requires_files.append(filename)
+                    value = value.strip()
                     key = 'description'
 
                 if metadata.is_metadata_field(key):
                     metadata[key] = self._convert_metadata(key, value)
 
+
         if 'files' in content:
-            files = dict([(key, self._multiline(value))
+            def _convert(key, value):
+                if key not in ('packages_root',):
+                    value = self._multiline(value)
+                return value
+
+            files = dict([(key, _convert(key, value))
                           for key, value in content['files'].iteritems()])
             self.dist.packages = []
-            self.dist.package_dir = {}
+            self.dist.package_dir = pkg_dir = files.get('packages_root')
 
             packages = files.get('packages', [])
             if isinstance(packages, str):
                 packages = [packages]
 
             for package in packages:
-                if ':' in package:
-                    dir_, package = package.split(':')
-                    self.dist.package_dir[package] = dir_
                 self.dist.packages.append(package)
 
             self.dist.py_modules = files.get('modules', [])
diff --git a/distutils2/index/dist.py b/distutils2/index/dist.py
--- a/distutils2/index/dist.py
+++ b/distutils2/index/dist.py
@@ -17,19 +17,19 @@
 import urllib
 import urlparse
 import zipfile
-
 try:
     import hashlib
 except ImportError:
     from distutils2._backport import hashlib
 
+from distutils2._backport.shutil import unpack_archive
 from distutils2.errors import IrrationalVersionError
 from distutils2.index.errors import (HashDoesNotMatch, UnsupportedHashName,
                                      CantParseArchiveName)
 from distutils2.version import (suggest_normalized_version, NormalizedVersion,
                                 get_version_predicate)
 from distutils2.metadata import DistributionMetadata
-from distutils2.util import untar_file, unzip_file, splitext
+from distutils2.util import splitext
 
 __all__ = ['ReleaseInfo', 'DistInfo', 'ReleasesList', 'get_infos_from_url']
 
@@ -206,6 +206,7 @@
     __hash__ = object.__hash__
 
 
+
 class DistInfo(IndexReference):
     """Represents a distribution retrieved from an index (sdist, bdist, ...)
     """
@@ -313,17 +314,8 @@
 
             filename = self.download()
             content_type = mimetypes.guess_type(filename)[0]
+            self._unpacked_dir = unpack_archive(filename)
 
-            if (content_type == 'application/zip'
-                or filename.endswith('.zip')
-                or filename.endswith('.pybundle')
-                or zipfile.is_zipfile(filename)):
-                unzip_file(filename, path, flatten=not filename.endswith('.pybundle'))
-            elif (content_type == 'application/x-gzip'
-                  or tarfile.is_tarfile(filename)
-                  or splitext(filename)[1].lower() in ('.tar', '.tar.gz', '.tar.bz2', '.tgz', '.tbz')):
-                untar_file(filename, path)
-            self._unpacked_dir = path
         return self._unpacked_dir
 
     def _check_md5(self, filename):
diff --git a/distutils2/index/xmlrpc.py b/distutils2/index/xmlrpc.py
--- a/distutils2/index/xmlrpc.py
+++ b/distutils2/index/xmlrpc.py
@@ -127,10 +127,17 @@
         return release
 
     def get_metadata(self, project_name, version):
-        """Retreive project metadatas.
+        """Retrieve project metadata.
 
         Return a ReleaseInfo object, with metadata informations filled in.
         """
+        # to be case-insensitive, get the information from the XML-RPC API
+        projects = [d['name'] for d in
+                    self.proxy.search({'name': project_name})
+                    if d['name'].lower() == project_name]
+        if len(projects) > 0:
+            project_name = projects[0]
+
         metadata = self.proxy.release_data(project_name, version)
         project = self._get_project(project_name)
         if version not in project.get_versions():
diff --git a/distutils2/install.py b/distutils2/install.py
--- a/distutils2/install.py
+++ b/distutils2/install.py
@@ -1,14 +1,16 @@
 from tempfile import mkdtemp
-import logging
 import shutil
 import os
 import errno
 import itertools
 
+from distutils2 import logger
 from distutils2._backport.pkgutil import get_distributions
+from distutils2._backport.sysconfig import get_config_var
 from distutils2.depgraph import generate_graph
 from distutils2.index import wrapper
 from distutils2.index.errors import ProjectNotFound, ReleaseNotFound
+from distutils2.version import get_version_predicate
 
 """Provides installations scripts.
 
@@ -53,7 +55,63 @@
             else:
                 raise e
         os.rename(old, new)
-        yield(old, new)
+        yield (old, new)
+
+
+def _run_d1_install(archive_dir, path):
+    # backward compat: using setuptools or plain-distutils
+    cmd = '%s setup.py install --root=%s --record=%s'
+    setup_py = os.path.join(archive_dir, 'setup.py')
+    if 'setuptools' in open(setup_py).read():
+        cmd += ' --single-version-externally-managed'
+
+    # how to place this file in the egg-info dir
+    # for non-distutils2 projects ?
+    record_file = os.path.join(archive_dir, 'RECORD')
+    os.system(cmd % (sys.executable, path, record_file))
+    if not os.path.exists(record_file):
+        raise ValueError('Failed to install.')
+    return open(record_file).read().split('\n')
+
+
+def _run_d2_install(archive_dir, path):
+    # using our own install command
+    raise NotImplementedError()
+
+
+def _install_dist(dist, path):
+    """Install a distribution into a path.
+
+    This:
+
+    * unpack the distribution
+    * copy the files in "path"
+    * determine if the distribution is distutils2 or distutils1.
+    """
+    where = dist.unpack(archive)
+
+    # get into the dir
+    archive_dir = None
+    for item in os.listdir(where):
+        fullpath = os.path.join(where, item)
+        if os.path.isdir(fullpath):
+            archive_dir = fullpath
+            break
+
+    if archive_dir is None:
+        raise ValueError('Cannot locate the unpacked archive')
+
+    # install
+    old_dir = os.getcwd()
+    os.chdir(archive_dir)
+    try:
+        # distutils2 or distutils1 ?
+        if 'setup.py' in os.listdir(archive_dir):
+            return _run_d1_install(archive_dir, path)
+        else:
+            return _run_d2_install(archive_dir, path)
+    finally:
+        os.chdir(old_dir)
 
 
 def install_dists(dists, path=None):
@@ -65,19 +123,23 @@
     Return a list of installed files.
 
     :param dists: distributions to install
-    :param path: base path to install distribution on
+    :param path: base path to install distribution in
     """
     if not path:
         path = mkdtemp()
 
     installed_dists, installed_files = [], []
     for d in dists:
+        logger.info('Installing %s %s' % (d.name, d.version))
         try:
-            installed_files.extend(d.install(path))
+            installed_files.extend(_install_dist(d, path))
             installed_dists.append(d)
         except Exception, e :
+            logger.info('Failed. %s' % str(e))
+
+            # reverting
             for d in installed_dists:
-                d.uninstall()
+                uninstall(d)
             raise e
     return installed_files
 
@@ -123,16 +185,26 @@
     try:
         if install:
             installed_files = install_dists(install, install_path)  # install to tmp first
-        for files in temp_files.itervalues():
-            for old, new in files:
-                os.remove(new)
 
-    except Exception,e:
+    except:
         # if an error occurs, put back the files in the good place.
-        for files in temp_files.itervalues():
+        for files in temp_files.values():
             for old, new in files:
                 shutil.move(new, old)
 
+        # now re-raising
+        raise
+
+    # we can remove them for good
+    for files in temp_files.values():
+        for old, new in files:
+            os.remove(new)
+
+
+def _get_setuptools_deps(release):
+    # NotImplementedError
+    pass
+
 
 def get_infos(requirements, index=None, installed=None, prefer_final=True):
     """Return the informations on what's going to be installed and upgraded.
@@ -154,44 +226,74 @@
     Conflict contains all the conflicting distributions, if there is a
     conflict.
     """
+    if not installed:
+        logger.info('Reading installed distributions')
+        installed = get_distributions(use_egg_info=True)
+
+    infos = {'install': [], 'remove': [], 'conflict': []}
+    # Is a compatible version of the project already installed?
+    predicate = get_version_predicate(requirements)
+    found = False
+    installed = list(installed)
+
+    # check that the project isn't already installed
+    for installed_project in installed:
+        # is it a compatible project ?
+        if predicate.name.lower() != installed_project.name.lower():
+            continue
+        found = True
+        logger.info('Found %s %s' % (installed_project.name,
+                                     installed_project.version))
+
+        # if we already have something installed, check it matches the
+        # requirements
+        if predicate.match(installed_project.version):
+            return infos
+        break
+
+    if not found:
+        logger.info('Project not installed.')
 
     if not index:
         index = wrapper.ClientWrapper()
 
-    if not installed:
-        installed = get_distributions(use_egg_info=True)
-
     # Get all the releases that match the requirements
     try:
         releases = index.get_releases(requirements)
     except (ReleaseNotFound, ProjectNotFound), e:
         raise InstallationException('Release not found: "%s"' % requirements)
-
+    
     # Pick up a release, and try to get the dependency tree
     release = releases.get_last(requirements, prefer_final=prefer_final)
 
-    # Iter since we found something without conflicts
+    if release is None:
+        logger.info('Could not find a matching project')
+        return infos
+
+    # this works for Metadata 1.2
     metadata = release.fetch_metadata()
 
-    # Get the distributions already_installed on the system
-    # and add the one we want to install
+    # for earlier, we need to build setuptools deps if any
+    if 'requires_dist' not in metadata:
+        deps = _get_setuptools_deps(release)
+    else:
+        deps = metadata['requires_dist']
 
     distributions = itertools.chain(installed, [release])
     depgraph = generate_graph(distributions)
 
     # Store all the already_installed packages in a list, in case of rollback.
-    infos = {'install': [], 'remove': [], 'conflict': []}
-
     # Get what the missing deps are
-    for dists in depgraph.missing.itervalues():
-        if dists:
-            logging.info("missing dependencies found, installing them")
-            # we have missing deps
-            for dist in dists:
-                _update_infos(infos, get_infos(dist, index, installed))
+    dists = depgraph.missing[release]
+    if dists:
+        logger.info("missing dependencies found, retrieving metadata")
+        # we have missing deps
+        for dist in dists:
+            _update_infos(infos, get_infos(dist, index, installed))
 
     # Fill in the infos
     existing = [d for d in installed if d.name == release.name]
+
     if existing:
         infos['remove'].append(existing[0])
         infos['conflict'].extend(depgraph.reverse_list[existing[0]])
@@ -203,16 +305,46 @@
     """extends the lists contained in the `info` dict with those contained
     in the `new_info` one
     """
-    for key, value in infos.iteritems():
+    for key, value in infos.items():
         if key in new_infos:
             infos[key].extend(new_infos[key])
 
 
+def remove(project_name):
+    """Removes a single project from the installation"""
+    pass
+
+
+
+
 def main(**attrs):
     if 'script_args' not in attrs:
         import sys
         attrs['requirements'] = sys.argv[1]
     get_infos(**attrs)
 
+
+def install(project):
+    logger.info('Getting information about "%s".' % project)
+    try:
+        info = get_infos(project)
+    except InstallationException:
+        logger.info('Cound not find "%s".' % project)
+        return
+
+    if info['install'] == []:
+        logger.info('Nothing to install.')
+        return
+
+    install_path = get_config_var('base')
+    try:
+        install_from_infos(info['install'], info['remove'], info['conflict'],
+                           install_path=install_path)
+
+    except InstallationConflict, e:
+        projects = ['%s %s' % (p.name, p.version) for p in e.args[0]]
+        logger.info('"%s" conflicts with "%s"' % (project, ','.join(projects)))
+
+
 if __name__ == '__main__':
     main()
diff --git a/distutils2/markers.py b/distutils2/markers.py
new file mode 100644
--- /dev/null
+++ b/distutils2/markers.py
@@ -0,0 +1,194 @@
+""" Micro-language for PEP 345 environment markers
+"""
+import sys
+import platform
+import os
+from tokenize import tokenize, NAME, OP, STRING, ENDMARKER
+from StringIO import StringIO
+
+__all__ = ['interpret']
+
+
+# allowed operators
+_OPERATORS = {'==': lambda x, y: x == y,
+              '!=': lambda x, y: x != y,
+              '>': lambda x, y: x > y,
+              '>=': lambda x, y: x >= y,
+              '<': lambda x, y: x < y,
+              '<=': lambda x, y: x <= y,
+              'in': lambda x, y: x in y,
+              'not in': lambda x, y: x not in y}
+
+
+def _operate(operation, x, y):
+    return _OPERATORS[operation](x, y)
+
+
+# restricted set of variables
+_VARS = {'sys.platform': sys.platform,
+         'python_version': sys.version[:3],
+         'python_full_version': sys.version.split(' ', 1)[0],
+         'os.name': os.name,
+         'platform.version': platform.version(),
+         'platform.machine': platform.machine()}
+
+
+class _Operation(object):
+
+    def __init__(self, execution_context=None):
+        self.left = None
+        self.op = None
+        self.right = None
+        if execution_context is None:
+            execution_context = {}
+        self.execution_context = execution_context
+
+    def _get_var(self, name):
+        if name in self.execution_context:
+            return self.execution_context[name]
+        return _VARS[name]
+
+    def __repr__(self):
+        return '%s %s %s' % (self.left, self.op, self.right)
+
+    def _is_string(self, value):
+        if value is None or len(value) < 2:
+            return False
+        for delimiter in '"\'':
+            if value[0] == value[-1] == delimiter:
+                return True
+        return False
+
+    def _is_name(self, value):
+        return value in _VARS
+
+    def _convert(self, value):
+        if value in _VARS:
+            return self._get_var(value)
+        return value.strip('"\'')
+
+    def _check_name(self, value):
+        if value not in _VARS:
+            raise NameError(value)
+
+    def _nonsense_op(self):
+        msg = 'This operation is not supported : "%s"' % self
+        raise SyntaxError(msg)
+
+    def __call__(self):
+        # make sure we do something useful
+        if self._is_string(self.left):
+            if self._is_string(self.right):
+                self._nonsense_op()
+            self._check_name(self.right)
+        else:
+            if not self._is_string(self.right):
+                self._nonsense_op()
+            self._check_name(self.left)
+
+        if self.op not in _OPERATORS:
+            raise TypeError('Operator not supported "%s"' % self.op)
+
+        left = self._convert(self.left)
+        right = self._convert(self.right)
+        return _operate(self.op, left, right)
+
+
+class _OR(object):
+    def __init__(self, left, right=None):
+        self.left = left
+        self.right = right
+
+    def filled(self):
+        return self.right is not None
+
+    def __repr__(self):
+        return 'OR(%r, %r)' % (self.left, self.right)
+
+    def __call__(self):
+        return self.left() or self.right()
+
+
+class _AND(object):
+    def __init__(self, left, right=None):
+        self.left = left
+        self.right = right
+
+    def filled(self):
+        return self.right is not None
+
+    def __repr__(self):
+        return 'AND(%r, %r)' % (self.left, self.right)
+
+    def __call__(self):
+        return self.left() and self.right()
+
+
+class _CHAIN(object):
+
+    def __init__(self, execution_context=None):
+        self.ops = []
+        self.op_starting = True
+        self.execution_context = execution_context
+
+    def eat(self, toktype, tokval, rowcol, line, logical_line):
+        if toktype not in (NAME, OP, STRING, ENDMARKER):
+            raise SyntaxError('Type not supported "%s"' % tokval)
+
+        if self.op_starting:
+            op = _Operation(self.execution_context)
+            if len(self.ops) > 0:
+                last = self.ops[-1]
+                if isinstance(last, (_OR, _AND)) and not last.filled():
+                    last.right = op
+                else:
+                    self.ops.append(op)
+            else:
+                self.ops.append(op)
+            self.op_starting = False
+        else:
+            op = self.ops[-1]
+
+        if (toktype == ENDMARKER or
+            (toktype == NAME and tokval in ('and', 'or'))):
+            if toktype == NAME and tokval == 'and':
+                self.ops.append(_AND(self.ops.pop()))
+            elif toktype == NAME and tokval == 'or':
+                self.ops.append(_OR(self.ops.pop()))
+            self.op_starting = True
+            return
+
+        if isinstance(op, (_OR, _AND)) and op.right is not None:
+            op = op.right
+
+        if ((toktype in (NAME, STRING) and tokval not in ('in', 'not'))
+            or (toktype == OP and tokval == '.')):
+            if op.op is None:
+                if op.left is None:
+                    op.left = tokval
+                else:
+                    op.left += tokval
+            else:
+                if op.right is None:
+                    op.right = tokval
+                else:
+                    op.right += tokval
+        elif toktype == OP or tokval in ('in', 'not'):
+            if tokval == 'in' and op.op == 'not':
+                op.op = 'not in'
+            else:
+                op.op = tokval
+
+    def result(self):
+        for op in self.ops:
+            if not op():
+                return False
+        return True
+
+
+def interpret(marker, execution_context=None):
+    """Interpret a marker and return a result depending on environment."""
+    marker = marker.strip()
+    operations = _CHAIN(execution_context)
+    tokenize(StringIO(marker).readline, operations.eat)
+    return operations.result()
diff --git a/distutils2/metadata.py b/distutils2/metadata.py
--- a/distutils2/metadata.py
+++ b/distutils2/metadata.py
@@ -5,13 +5,12 @@
 
 import os
 import sys
-import platform
 import re
 from StringIO import StringIO
 from email import message_from_file
-from tokenize import tokenize, NAME, OP, STRING, ENDMARKER
 
 from distutils2 import logger
+from distutils2.markers import interpret
 from distutils2.version import (is_valid_predicate, is_valid_version,
                                 is_valid_versions)
 from distutils2.errors import (MetadataMissingError,
@@ -78,13 +77,13 @@
                 'Obsoletes-Dist', 'Requires-External', 'Maintainer',
                 'Maintainer-email', 'Project-URL')
 
+_345_REQUIRED = ('Name', 'Version')
+
 _ALL_FIELDS = set()
 _ALL_FIELDS.update(_241_FIELDS)
 _ALL_FIELDS.update(_314_FIELDS)
 _ALL_FIELDS.update(_345_FIELDS)
 
-_345_REQUIRED = ('Name', 'Version')
-
 def _version2fieldlist(version):
     if version == '1.0':
         return _241_FIELDS
@@ -174,14 +173,19 @@
 _LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes',
         'Requires', 'Provides', 'Obsoletes-Dist',
         'Provides-Dist', 'Requires-Dist', 'Requires-External',
-        'Project-URL')
+        'Project-URL', 'Supported-Platform')
 _LISTTUPLEFIELDS = ('Project-URL',)
 
 _ELEMENTSFIELD = ('Keywords',)
 
 _UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description')
 
-_MISSING = object()
+class NoDefault(object):
+    """Marker object used for clean representation"""
+    def __repr__(self):
+        return '<NoDefault>'
+
+_MISSING = NoDefault()
 
 class DistributionMetadata(object):
     """The metadata of a release.
@@ -202,6 +206,7 @@
         self._fields = {}
         self.display_warnings = display_warnings
         self.version = None
+        self.requires_files = []
         self.docutils_support = _HAS_DOCUTILS
         self.platform_dependent = platform_dependent
         self.execution_context = execution_context
@@ -285,7 +290,7 @@
         if not self.platform_dependent or ';' not in value:
             return True, value
         value, marker = value.split(';')
-        return _interpret(marker, self.execution_context), value
+        return interpret(marker, self.execution_context), value
 
     def _remove_line_prefix(self, value):
         return _LINE_PREFIX.sub('\n', value)
@@ -294,13 +299,20 @@
     # Public API
     #
     def get_fullname(self):
+        """Return the distribution name with version"""
         return '%s-%s' % (self['Name'], self['Version'])
 
     def is_metadata_field(self, name):
+        """return True if name is a valid metadata key"""
         name = self._convert_name(name)
         return name in _ALL_FIELDS
 
+    def is_multi_field(self, name):
+        name = self._convert_name(name)
+        return name in _LISTFIELDS
+
     def read(self, filepath):
+        """Read the metadata values from a file path."""
         self.read_file(open(filepath))
 
     def read_file(self, fileob):
@@ -454,7 +466,8 @@
         return value
 
     def check(self, strict=False):
-        """Check if the metadata is compliant."""
+        """Check if the metadata is compliant. If strict is True then raise if
+        no Name or Version are provided"""
         # XXX should check the versions (if the file was loaded)
         missing, warnings = [], []
 
@@ -494,198 +507,13 @@
         return missing, warnings
 
     def keys(self):
+        """Dict like api"""
         return _version2fieldlist(self.version)
 
     def values(self):
+        """Dict like api"""
         return [self[key] for key in self.keys()]
 
     def items(self):
+        """Dict like api"""
         return [(key, self[key]) for key in self.keys()]
-
-
-#
-# micro-language for PEP 345 environment markers
-#
-
-# allowed operators
-_OPERATORS = {'==': lambda x, y: x == y,
-              '!=': lambda x, y: x != y,
-              '>': lambda x, y: x > y,
-              '>=': lambda x, y: x >= y,
-              '<': lambda x, y: x < y,
-              '<=': lambda x, y: x <= y,
-              'in': lambda x, y: x in y,
-              'not in': lambda x, y: x not in y}
-
-
-def _operate(operation, x, y):
-    return _OPERATORS[operation](x, y)
-
-# restricted set of variables
-_VARS = {'sys.platform': sys.platform,
-         'python_version': sys.version[:3],
-         'python_full_version': sys.version.split(' ', 1)[0],
-         'os.name': os.name,
-         'platform.version': platform.version(),
-         'platform.machine': platform.machine()}
-
-
-class _Operation(object):
-
-    def __init__(self, execution_context=None):
-        self.left = None
-        self.op = None
-        self.right = None
-        if execution_context is None:
-            execution_context = {}
-        self.execution_context = execution_context
-
-    def _get_var(self, name):
-        if name in self.execution_context:
-            return self.execution_context[name]
-        return _VARS[name]
-
-    def __repr__(self):
-        return '%s %s %s' % (self.left, self.op, self.right)
-
-    def _is_string(self, value):
-        if value is None or len(value) < 2:
-            return False
-        for delimiter in '"\'':
-            if value[0] == value[-1] == delimiter:
-                return True
-        return False
-
-    def _is_name(self, value):
-        return value in _VARS
-
-    def _convert(self, value):
-        if value in _VARS:
-            return self._get_var(value)
-        return value.strip('"\'')
-
-    def _check_name(self, value):
-        if value not in _VARS:
-            raise NameError(value)
-
-    def _nonsense_op(self):
-        msg = 'This operation is not supported : "%s"' % self
-        raise SyntaxError(msg)
-
-    def __call__(self):
-        # make sure we do something useful
-        if self._is_string(self.left):
-            if self._is_string(self.right):
-                self._nonsense_op()
-            self._check_name(self.right)
-        else:
-            if not self._is_string(self.right):
-                self._nonsense_op()
-            self._check_name(self.left)
-
-        if self.op not in _OPERATORS:
-            raise TypeError('Operator not supported "%s"' % self.op)
-
-        left = self._convert(self.left)
-        right = self._convert(self.right)
-        return _operate(self.op, left, right)
-
-
-class _OR(object):
-    def __init__(self, left, right=None):
-        self.left = left
-        self.right = right
-
-    def filled(self):
-        return self.right is not None
-
-    def __repr__(self):
-        return 'OR(%r, %r)' % (self.left, self.right)
-
-    def __call__(self):
-        return self.left() or self.right()
-
-
-class _AND(object):
-    def __init__(self, left, right=None):
-        self.left = left
-        self.right = right
-
-    def filled(self):
-        return self.right is not None
-
-    def __repr__(self):
-        return 'AND(%r, %r)' % (self.left, self.right)
-
-    def __call__(self):
-        return self.left() and self.right()
-
-
-class _CHAIN(object):
-
-    def __init__(self, execution_context=None):
-        self.ops = []
-        self.op_starting = True
-        self.execution_context = execution_context
-
-    def eat(self, toktype, tokval, rowcol, line, logical_line):
-        if toktype not in (NAME, OP, STRING, ENDMARKER):
-            raise SyntaxError('Type not supported "%s"' % tokval)
-
-        if self.op_starting:
-            op = _Operation(self.execution_context)
-            if len(self.ops) > 0:
-                last = self.ops[-1]
-                if isinstance(last, (_OR, _AND)) and not last.filled():
-                    last.right = op
-                else:
-                    self.ops.append(op)
-            else:
-                self.ops.append(op)
-            self.op_starting = False
-        else:
-            op = self.ops[-1]
-
-        if (toktype == ENDMARKER or
-            (toktype == NAME and tokval in ('and', 'or'))):
-            if toktype == NAME and tokval == 'and':
-                self.ops.append(_AND(self.ops.pop()))
-            elif toktype == NAME and tokval == 'or':
-                self.ops.append(_OR(self.ops.pop()))
-            self.op_starting = True
-            return
-
-        if isinstance(op, (_OR, _AND)) and op.right is not None:
-            op = op.right
-
-        if ((toktype in (NAME, STRING) and tokval not in ('in', 'not'))
-            or (toktype == OP and tokval == '.')):
-            if op.op is None:
-                if op.left is None:
-                    op.left = tokval
-                else:
-                    op.left += tokval
-            else:
-                if op.right is None:
-                    op.right = tokval
-                else:
-                    op.right += tokval
-        elif toktype == OP or tokval in ('in', 'not'):
-            if tokval == 'in' and op.op == 'not':
-                op.op = 'not in'
-            else:
-                op.op = tokval
-
-    def result(self):
-        for op in self.ops:
-            if not op():
-                return False
-        return True
-
-
-def _interpret(marker, execution_context=None):
-    """Interpret a marker and return a result depending on environment."""
-    marker = marker.strip()
-    operations = _CHAIN(execution_context)
-    tokenize(StringIO(marker).readline, operations.eat)
-    return operations.result()
diff --git a/distutils2/run.py b/distutils2/run.py
--- a/distutils2/run.py
+++ b/distutils2/run.py
@@ -1,7 +1,9 @@
 import os
 import sys
 from optparse import OptionParser
+import logging
 
+from distutils2 import logger
 from distutils2.util import grok_environment_error
 from distutils2.errors import (DistutilsSetupError, DistutilsArgError,
                                DistutilsError, CCompilerError)
@@ -9,6 +11,7 @@
 from distutils2 import __version__
 from distutils2._backport.pkgutil import get_distributions, get_distribution
 from distutils2.depgraph import generate_graph
+from distutils2.install import install
 
 # This is a barebones help message generated displayed when the user
 # runs the setup script with no arguments at all.  More useful help
@@ -109,13 +112,23 @@
 
         except (DistutilsError,
                 CCompilerError), msg:
+            raise
             raise SystemExit, "error: " + str(msg)
 
     return dist
 
 
+def _set_logger():
+    logger.setLevel(logging.INFO)
+    sth = logging.StreamHandler(sys.stderr)
+    sth.setLevel(logging.INFO)
+    logger.addHandler(sth)
+    logger.propagate = 0
+
+
 def main():
     """Main entry point for Distutils2"""
+    _set_logger()
     parser = OptionParser()
     parser.disable_interspersed_args()
     parser.usage = '%prog [options] cmd1 cmd2 ..'
@@ -124,6 +137,10 @@
                   action="store_true", dest="version", default=False,
                   help="Prints out the version of Distutils2 and exits.")
 
+    parser.add_option("-m", "--metadata",
+                  action="append", dest="metadata", default=[],
+                  help="List METADATA metadata or 'all' for all metadatas.")
+
     parser.add_option("-s", "--search",
                   action="store", dest="search", default=None,
                   help="Search for installed distributions.")
@@ -136,11 +153,44 @@
                   action="store_true", dest="fgraph", default=False,
                   help="Display the full graph for installed distributions.")
 
+    parser.add_option("-i", "--install",
+                  action="store", dest="install",
+                  help="Install a project.")
+
+    parser.add_option("-r", "--remove",
+                  action="store", dest="remove",
+                  help="Remove a project.")
+
     options, args = parser.parse_args()
     if options.version:
         print('Distutils2 %s' % __version__)
 #        sys.exit(0)
 
+    if len(options.metadata):
+        from distutils2.dist import Distribution
+        dist = Distribution()
+        dist.parse_config_files()
+        metadata = dist.metadata
+
+        if 'all' in options.metadata:
+            keys = metadata.keys()
+        else:
+            keys = options.metadata
+            if len(keys) == 1:
+                print metadata[keys[0]]
+                sys.exit(0)
+
+        for key in keys:
+            if key in metadata:
+                print(metadata._convert_name(key)+':')
+                value = metadata[key]
+                if isinstance(value, list):
+                    for v in value:
+                        print('    '+v)
+                else:
+                    print('    '+value.replace('\n', '\n    '))
+        sys.exit(0)
+
     if options.search is not None:
         search = options.search.lower()
         for dist in get_distributions(use_egg_info=True):
@@ -169,6 +219,10 @@
         print(graph)
         sys.exit(0)
 
+    if options.install is not None:
+        install(options.install)
+        sys.exit(0)
+
     if len(args) == 0:
         parser.print_help()
         sys.exit(0)
diff --git a/distutils2/tests/pypi_server.py b/distutils2/tests/pypi_server.py
--- a/distutils2/tests/pypi_server.py
+++ b/distutils2/tests/pypi_server.py
@@ -375,6 +375,7 @@
 
     def __init__(self, dists=[]):
         self._dists = dists
+        self._search_result = []
 
     def add_distributions(self, dists):
         for dist in dists:
diff --git a/distutils2/tests/support.py b/distutils2/tests/support.py
--- a/distutils2/tests/support.py
+++ b/distutils2/tests/support.py
@@ -17,10 +17,11 @@
             super(SomeTestCase, self).setUp()
             ... # other setup code
 
-Read each class' docstring to see its purpose and usage.
+Also provided is a DummyCommand class, useful to mock commands in the
+tests of another command that needs them, a create_distribution function
+and a skip_unless_symlink decorator.
 
-Also provided is a DummyCommand class, useful to mock commands in the
-tests of another command that needs them (see docstring).
+Each class or function has a docstring to explain its purpose and usage.
 """
 
 import os
@@ -35,7 +36,8 @@
 from distutils2.tests import unittest
 
 __all__ = ['LoggingCatcher', 'WarningsCatcher', 'TempdirManager',
-           'EnvironGuard', 'DummyCommand', 'unittest']
+           'EnvironGuard', 'DummyCommand', 'unittest', 'create_distribution',
+           'skip_unless_symlink']
 
 
 class LoggingCatcher(object):
@@ -135,7 +137,7 @@
         finally:
             f.close()
 
-    def create_dist(self, pkg_name='foo', **kw):
+    def create_dist(self, **kw):
         """Create a stub distribution object and files.
 
         This function creates a Distribution instance (use keyword arguments
@@ -143,17 +145,19 @@
         (currently an empty directory).
 
         It returns the path to the directory and the Distribution instance.
-        You can use TempdirManager.write_file to write any file in that
+        You can use self.write_file to write any file in that
         directory, e.g. setup scripts or Python modules.
         """
         # Late import so that third parties can import support without
         # loading a ton of distutils2 modules in memory.
         from distutils2.dist import Distribution
+        if 'name' not in kw:
+            kw['name'] = 'foo'
         tmp_dir = self.mkdtemp()
-        pkg_dir = os.path.join(tmp_dir, pkg_name)
-        os.mkdir(pkg_dir)
+        project_dir = os.path.join(tmp_dir, kw['name'])
+        os.mkdir(project_dir)
         dist = Distribution(attrs=kw)
-        return pkg_dir, dist
+        return project_dir, dist
 
 
 class EnvironGuard(object):
@@ -211,3 +215,9 @@
     d.parse_command_line()
     return d
 
+
+try:
+    from test.test_support import skip_unless_symlink
+except ImportError:
+    skip_unless_symlink = unittest.skip(
+        'requires test.test_support.skip_unless_symlink')
diff --git a/distutils2/tests/test_command_build_ext.py b/distutils2/tests/test_command_build_ext.py
--- a/distutils2/tests/test_command_build_ext.py
+++ b/distutils2/tests/test_command_build_ext.py
@@ -289,7 +289,7 @@
 
         # inplace = 0, cmd.package = 'bar'
         build_py = cmd.get_finalized_command('build_py')
-        build_py.package_dir = {'': 'bar'}
+        build_py.package_dir = 'bar'
         path = cmd.get_ext_fullpath('foo')
         # checking that the last directory is the build_dir
         path = os.path.split(path)[0]
@@ -318,7 +318,7 @@
         dist = Distribution()
         cmd = build_ext(dist)
         cmd.inplace = 1
-        cmd.distribution.package_dir = {'': 'src'}
+        cmd.distribution.package_dir = 'src'
         cmd.distribution.packages = ['lxml', 'lxml.html']
         curdir = os.getcwd()
         wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext)
@@ -334,7 +334,7 @@
 
         # building twisted.runner.portmap not inplace
         build_py = cmd.get_finalized_command('build_py')
-        build_py.package_dir = {}
+        build_py.package_dir = None
         cmd.distribution.packages = ['twisted', 'twisted.runner.portmap']
         path = cmd.get_ext_fullpath('twisted.runner.portmap')
         wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner',
diff --git a/distutils2/tests/test_command_build_py.py b/distutils2/tests/test_command_build_py.py
--- a/distutils2/tests/test_command_build_py.py
+++ b/distutils2/tests/test_command_build_py.py
@@ -17,12 +17,14 @@
 
     def test_package_data(self):
         sources = self.mkdtemp()
-        f = open(os.path.join(sources, "__init__.py"), "w")
+        pkg_dir = os.path.join(sources, 'pkg')
+        os.mkdir(pkg_dir)
+        f = open(os.path.join(pkg_dir, "__init__.py"), "w")
         try:
             f.write("# Pretend this is a package.")
         finally:
             f.close()
-        f = open(os.path.join(sources, "README.txt"), "w")
+        f = open(os.path.join(pkg_dir, "README.txt"), "w")
         try:
             f.write("Info about this package")
         finally:
@@ -31,8 +33,9 @@
         destination = self.mkdtemp()
 
         dist = Distribution({"packages": ["pkg"],
-                             "package_dir": {"pkg": sources}})
+                             "package_dir": sources})
         # script_name need not exist, it just need to be initialized
+
         dist.script_name = os.path.join(sources, "setup.py")
         dist.command_obj["build"] = support.DummyCommand(
             force=0,
@@ -42,7 +45,7 @@
             use_2to3=False)
         dist.packages = ["pkg"]
         dist.package_data = {"pkg": ["README.txt"]}
-        dist.package_dir = {"pkg": sources}
+        dist.package_dir = sources
 
         cmd = build_py(dist)
         cmd.compile = 1
@@ -68,19 +71,20 @@
 
         # create the distribution files.
         sources = self.mkdtemp()
-        open(os.path.join(sources, "__init__.py"), "w").close()
-
-        testdir = os.path.join(sources, "doc")
+        pkg = os.path.join(sources, 'pkg')
+        os.mkdir(pkg)
+        open(os.path.join(pkg, "__init__.py"), "w").close()
+        testdir = os.path.join(pkg, "doc")
         os.mkdir(testdir)
         open(os.path.join(testdir, "testfile"), "w").close()
 
         os.chdir(sources)
         old_stdout = sys.stdout
-        sys.stdout = StringIO.StringIO()
+        #sys.stdout = StringIO.StringIO()
 
         try:
             dist = Distribution({"packages": ["pkg"],
-                                 "package_dir": {"pkg": ""},
+                                 "package_dir": sources,
                                  "package_data": {"pkg": ["doc/*"]}})
             # script_name need not exist, it just need to be initialized
             dist.script_name = os.path.join(sources, "setup.py")
@@ -89,7 +93,7 @@
 
             try:
                 dist.run_commands()
-            except DistutilsFileError:
+            except DistutilsFileError, e:
                 self.fail("failed package_data test when package_dir is ''")
         finally:
             # Restore state.
diff --git a/distutils2/tests/test_command_install_dist.py b/distutils2/tests/test_command_install_dist.py
--- a/distutils2/tests/test_command_install_dist.py
+++ b/distutils2/tests/test_command_install_dist.py
@@ -180,8 +180,8 @@
             cmd.user = 'user'
             self.assertRaises(DistutilsOptionError, cmd.finalize_options)
 
-    def test_record(self):
-
+    def test_old_record(self):
+        # test pre-PEP 376 --record option (outside dist-info dir)
         install_dir = self.mkdtemp()
         pkgdir, dist = self.create_dist()
 
@@ -189,11 +189,11 @@
         cmd = install_dist(dist)
         dist.command_obj['install_dist'] = cmd
         cmd.root = install_dir
-        cmd.record = os.path.join(pkgdir, 'RECORD')
+        cmd.record = os.path.join(pkgdir, 'filelist')
         cmd.ensure_finalized()
         cmd.run()
 
-        # let's check the RECORD file was created with four
+        # let's check the record file was created with four
         # lines, one for each .dist-info entry: METADATA,
         # INSTALLER, REQUSTED, RECORD
         f = open(cmd.record)
diff --git a/distutils2/tests/test_config.py b/distutils2/tests/test_config.py
--- a/distutils2/tests/test_config.py
+++ b/distutils2/tests/test_config.py
@@ -5,6 +5,8 @@
 from StringIO import StringIO
 
 from distutils2.tests import unittest, support, run_unittest
+from distutils2.command.sdist import sdist
+from distutils2.errors import DistutilsFileError
 
 
 SETUP_CFG = """
@@ -16,7 +18,7 @@
 maintainer = Éric Araujo
 maintainer_email = merwok at netwok.org
 summary = A sample project demonstrating distutils2 packaging
-description-file = README
+description-file = %(description-file)s
 keywords = distutils2, packaging, sample project
 
 classifier =
@@ -47,9 +49,11 @@
   Fork in progress, http://bitbucket.org/Merwok/sample-distutils2-project
 
 [files]
+packages_root = src
+
 packages = one
-           src:two
-           src2:three
+           two
+           three
 
 modules = haven
 
@@ -66,6 +70,8 @@
   config = cfg/data.cfg
   /etc/init.d = init-script
 
+extra_files = %(extra-files)s
+
 # Replaces MANIFEST.in
 sdist_extra =
   include THANKS HACKING
@@ -130,22 +136,33 @@
         self.addCleanup(setattr, sys, 'stderr', sys.stderr)
         self.addCleanup(os.chdir, os.getcwd())
 
-    def test_config(self):
-        tempdir = self.mkdtemp()
-        os.chdir(tempdir)
-        self.write_file('setup.cfg', SETUP_CFG)
-        self.write_file('README', 'yeah')
+    def write_setup(self, kwargs=None):
+        opts = {'description-file': 'README', 'extra-files':''}
+        if kwargs:
+            opts.update(kwargs)
+        self.write_file('setup.cfg', SETUP_CFG % opts)
 
-        # try to load the metadata now
+
+    def run_setup(self, *args):
+        # run setup with args
         sys.stdout = StringIO()
-        sys.argv[:] = ['setup.py', '--version']
+        sys.argv[:] = [''] + list(args)
         old_sys = sys.argv[:]
-
         try:
             from distutils2.run import commands_main
             dist = commands_main()
         finally:
             sys.argv[:] = old_sys
+        return dist
+
+    def test_config(self):
+        tempdir = self.mkdtemp()
+        os.chdir(tempdir)
+        self.write_setup()
+        self.write_file('README', 'yeah')
+
+        # try to load the metadata now
+        dist = self.run_setup('--version')
 
         # sanity check
         self.assertEqual(sys.stdout.getvalue(), '0.6.4.dev1' + os.linesep)
@@ -184,7 +201,6 @@
                  'http://bitbucket.org/Merwok/sample-distutils2-project')]
         self.assertEqual(dist.metadata['Project-Url'], urls)
 
-
         self.assertEqual(dist.packages, ['one', 'two', 'three'])
         self.assertEqual(dist.py_modules, ['haven'])
         self.assertEqual(dist.package_data, {'cheese': 'data/templates/*'})
@@ -192,7 +208,8 @@
             [('bitmaps ', ['bm/b1.gif', 'bm/b2.gif']),
              ('config ', ['cfg/data.cfg']),
              ('/etc/init.d ', ['init-script'])])
-        self.assertEqual(dist.package_dir['two'], 'src')
+
+        self.assertEqual(dist.package_dir, 'src')
 
         # Make sure we get the foo command loaded.  We use a string comparison
         # instead of assertIsInstance because the class is not the same when
@@ -213,10 +230,94 @@
         d = new_compiler(compiler='d')
         self.assertEqual(d.description, 'D Compiler')
 
+
+    def test_multiple_description_file(self):
+        tempdir = self.mkdtemp()
+        os.chdir(tempdir)
+
+        self.write_setup({'description-file': 'README  CHANGES'})
+        self.write_file('README', 'yeah')
+        self.write_file('CHANGES', 'changelog2')
+        dist = self.run_setup('--version')
+        self.assertEqual(dist.metadata.requires_files, ['README', 'CHANGES'])
+
+    def test_multiline_description_file(self):
+        tempdir = self.mkdtemp()
+        os.chdir(tempdir)
+
+        self.write_setup({'description-file': 'README\n  CHANGES'})
+        self.write_file('README', 'yeah')
+        self.write_file('CHANGES', 'changelog')
+        dist = self.run_setup('--version')
+        self.assertEqual(dist.metadata['description'], 'yeah\nchangelog')
+        self.assertEqual(dist.metadata.requires_files, ['README', 'CHANGES'])
+
+    def test_metadata_requires_description_files_missing(self):
+        tempdir = self.mkdtemp()
+        os.chdir(tempdir)
+        self.write_setup({'description-file': 'README\n  README2'})
+        self.write_file('README', 'yeah')
+        self.write_file('README2', 'yeah')
+        self.write_file('haven.py', '#')
+        self.write_file('script1.py', '#')
+        os.mkdir('scripts')
+        self.write_file(os.path.join('scripts', 'find-coconuts'), '#')
+        os.mkdir('bin')
+        self.write_file(os.path.join('bin', 'taunt'), '#')
+
+        os.mkdir('src')
+        for pkg in ('one', 'two', 'three'):
+            pkg = os.path.join('src', pkg)
+            os.mkdir(pkg)
+            self.write_file(os.path.join(pkg, '__init__.py'), '#')
+
+        dist = self.run_setup('--version')
+        cmd = sdist(dist)
+        cmd.finalize_options()
+        cmd.get_file_list()
+        self.assertRaises(DistutilsFileError, cmd.make_distribution)
+
+    def test_metadata_requires_description_files(self):
+        tempdir = self.mkdtemp()
+        os.chdir(tempdir)
+        self.write_setup({'description-file': 'README\n  README2',
+                          'extra-files':'\n  README2'})
+        self.write_file('README', 'yeah')
+        self.write_file('README2', 'yeah')
+        self.write_file('haven.py', '#')
+        self.write_file('script1.py', '#')
+        os.mkdir('scripts')
+        self.write_file(os.path.join('scripts', 'find-coconuts'), '#')
+        os.mkdir('bin')
+        self.write_file(os.path.join('bin', 'taunt'), '#')
+
+        os.mkdir('src')
+        for pkg in ('one', 'two', 'three'):
+            pkg = os.path.join('src', pkg)
+            os.mkdir(pkg)
+            self.write_file(os.path.join(pkg, '__init__.py'), '#')
+
+        dist = self.run_setup('--description')
+        self.assertIn('yeah\nyeah\n', sys.stdout.getvalue())
+
+        cmd = sdist(dist)
+        cmd.finalize_options()
+        cmd.get_file_list()
+        self.assertRaises(DistutilsFileError, cmd.make_distribution)
+
+        self.write_setup({'description-file': 'README\n  README2',
+                          'extra-files': '\n  README2\n    README'})
+        dist = self.run_setup('--description')
+        cmd = sdist(dist)
+        cmd.finalize_options()
+        cmd.get_file_list()
+        cmd.make_distribution()
+        self.assertIn('README\nREADME2\n', open('MANIFEST').read())
+
     def test_sub_commands(self):
         tempdir = self.mkdtemp()
         os.chdir(tempdir)
-        self.write_file('setup.cfg', SETUP_CFG)
+        self.write_setup()
         self.write_file('README', 'yeah')
         self.write_file('haven.py', '#')
         self.write_file('script1.py', '#')
@@ -224,20 +325,15 @@
         self.write_file(os.path.join('scripts', 'find-coconuts'), '#')
         os.mkdir('bin')
         self.write_file(os.path.join('bin', 'taunt'), '#')
+        os.mkdir('src')
 
-        for pkg in ('one', 'src', 'src2'):
+        for pkg in ('one', 'two', 'three'):
+            pkg = os.path.join('src', pkg)
             os.mkdir(pkg)
             self.write_file(os.path.join(pkg, '__init__.py'), '#')
 
         # try to run the install command to see if foo is called
-        sys.stdout = sys.stderr = StringIO()
-        sys.argv[:] = ['', 'install_dist']
-        old_sys = sys.argv[:]
-        try:
-            from distutils2.run import main
-            dist = main()
-        finally:
-            sys.argv[:] = old_sys
+        dist = self.run_setup('install_dist')
 
         self.assertEqual(dist.foo_was_here, 1)
 
diff --git a/distutils2/tests/test_install.py b/distutils2/tests/test_install.py
--- a/distutils2/tests/test_install.py
+++ b/distutils2/tests/test_install.py
@@ -29,27 +29,16 @@
 class ToInstallDist(object):
     """Distribution that will be installed"""
 
-    def __init__(self, raise_error=False, files=False):
-        self._raise_error = raise_error
+    def __init__(self, files=False):
         self._files = files
-        self.install_called = False
-        self.install_called_with = {}
         self.uninstall_called = False
         self._real_files = []
+        self.name = "fake"
+        self.version = "fake"
         if files:
             for f in range(0,3):
                self._real_files.append(mkstemp())
 
-    def install(self, *args):
-        self.install_called = True
-        self.install_called_with = args
-        if self._raise_error:
-            raise Exception('Oops !')
-        return ['/path/to/foo', '/path/to/bar']
-
-    def uninstall(self, **args):
-        self.uninstall_called = True
-
     def get_installed_files(self, **args):
         if self._files:
             return [f[1] for f in self._real_files]
@@ -58,7 +47,49 @@
         return self.get_installed_files()
 
 
+class MagicMock(object):
+    def __init__(self, return_value=None, raise_exception=False):
+        self.called = False
+        self._times_called = 0
+        self._called_with = []
+        self._return_value = return_value
+        self._raise = raise_exception
+    
+    def __call__(self, *args, **kwargs):
+        self.called = True
+        self._times_called = self._times_called + 1
+        self._called_with.append((args, kwargs))
+        iterable = hasattr(self._raise, '__iter__')
+        if self._raise:
+            if ((not iterable and self._raise) 
+                    or self._raise[self._times_called - 1]):
+                raise Exception
+        return self._return_value
+
+    def called_with(self, *args, **kwargs):
+        return (args, kwargs) in self._called_with
+
+
+def patch(parent, to_patch):
+    """monkey patch an attribute of a module for the duration of the call"""
+    def wrapper(func):
+        print func
+        print dir(func)
+        old_func = getattr(parent, to_patch)
+        def wrapped(*args, **kwargs):
+            parent.__dict__[to_patch] = MagicMock()
+            try:
+                out = func(*args, **kwargs)
+            finally:
+                setattr(parent, to_patch, old_func)
+            return out
+        return wrapped
+    return wrapper
+
+
 def get_installed_dists(dists):
+    """Return a list of fake installed dists.
+    Each dist is described by a (name, version, deps) tuple."""
     objects = []
     for (name, version, deps) in dists:
         objects.append(InstalledDist(name, version, deps))
@@ -69,6 +100,12 @@
     def _get_client(self, server, *args, **kwargs):
         return Client(server.full_address, *args, **kwargs)
 
+    def _patch_run_install(self):
+        """Patch run install"""
+
+    def _unpatch_run_install(self):
+        """Unpatch run install for d2 and d1"""
+
     def _get_results(self, output):
         """return a list of results"""
         installed = [(o.name, '%s' % o.version) for o in output['install']]
@@ -150,6 +187,8 @@
         # Tests that conflicts are detected
         client = self._get_client(server)
         archive_path = '%s/distribution.tar.gz' % server.full_address
+
+        # choxie depends on towel-stuff, which depends on bacon.
         server.xmlrpc.set_distributions([
             {'name':'choxie',
              'version': '2.0.0.9',
@@ -164,7 +203,9 @@
              'requires_dist': [],
              'url': archive_path},
             ])
-        already_installed = [('bacon', '0.1', []),
+
+        # name, version, deps.
+        already_installed = [('bacon', '0.1', []), 
                              ('chicken', '1.1', ['bacon (0.1)'])]
         output = install.get_infos("choxie", index=client, installed=
                            get_installed_dists(already_installed))
@@ -221,23 +262,39 @@
         # if one of the distribution installation fails, call uninstall on all
         # installed distributions.
 
-        d1 = ToInstallDist()
-        d2 = ToInstallDist(raise_error=True)
-        self.assertRaises(Exception, install.install_dists, [d1, d2])
-        for dist in (d1, d2):
-            self.assertTrue(dist.install_called)
-        self.assertTrue(d1.uninstall_called)
-        self.assertFalse(d2.uninstall_called)
+        old_install_dist = install._install_dist
+        old_uninstall = getattr(install, 'uninstall', None)
+
+        install._install_dist = MagicMock(return_value=[], 
+                raise_exception=(False, True))
+        install.uninstall = MagicMock()
+        try:
+            d1 = ToInstallDist()
+            d2 = ToInstallDist()
+            path = self.mkdtemp()
+            self.assertRaises(Exception, install.install_dists, [d1, d2], path)
+            self.assertTrue(install._install_dist.called_with(d1, path))
+            self.assertTrue(install.uninstall.called)
+        finally:
+            install._install_dist = old_install_dist
+            install.uninstall = old_uninstall
+
 
     def test_install_dists_success(self):
-        # test that the install method is called on each of the distributions.
-        d1 = ToInstallDist()
-        d2 = ToInstallDist()
-        install.install_dists([d1, d2])
-        for dist in (d1, d2):
-            self.assertTrue(dist.install_called)
-        self.assertFalse(d1.uninstall_called)
-        self.assertFalse(d2.uninstall_called)
+        old_install_dist = install._install_dist
+        install._install_dist = MagicMock(return_value=[])
+        try:
+            # test that the install method is called on each of the distributions.
+            d1 = ToInstallDist()
+            d2 = ToInstallDist()
+
+            # should call install
+            path = self.mkdtemp()
+            install.install_dists([d1, d2], path)
+            for dist in (d1, d2):
+                self.assertTrue(install._install_dist.called_with(dist, path))
+        finally:
+            install._install_dist = old_install_dist
 
     def test_install_from_infos_conflict(self):
         # assert conflicts raise an exception
@@ -262,29 +319,46 @@
             install.install_dists = old_install_dists
 
     def test_install_from_infos_remove_rollback(self):
-        # assert that if an error occurs, the removed files are restored.
-        remove = []
-        for i in range(0,2):
-            remove.append(ToInstallDist(files=True, raise_error=True))
-        to_install = [ToInstallDist(raise_error=True),
-                   ToInstallDist()]
+        old_install_dist = install._install_dist
+        old_uninstall = getattr(install, 'uninstall', None)
 
-        install.install_from_infos(remove=remove, install=to_install)
-        # assert that the files are in the same place
-        # assert that the files have been removed
-        for dist in remove:
-            for f in dist.get_installed_files():
-                self.assertTrue(os.path.exists(f))
+        install._install_dist = MagicMock(return_value=[], 
+                raise_exception=(False, True))
+        install.uninstall = MagicMock()
+        try:
+            # assert that if an error occurs, the removed files are restored.
+            remove = []
+            for i in range(0,2):
+                remove.append(ToInstallDist(files=True))
+            to_install = [ToInstallDist(), ToInstallDist()]
+
+            self.assertRaises(Exception, install.install_from_infos, 
+                    remove=remove, install=to_install)
+            # assert that the files are in the same place
+            # assert that the files have been removed
+            for dist in remove:
+                for f in dist.get_installed_files():
+                    self.assertTrue(os.path.exists(f))
+        finally:
+            install.install_dist = old_install_dist
+            install.uninstall = old_uninstall
+
 
     def test_install_from_infos_install_succes(self):
-        # assert that the distribution can be installed
-        install_path = "my_install_path"
-        to_install = [ToInstallDist(), ToInstallDist()]
+        old_install_dist = install._install_dist
+        install._install_dist = MagicMock([])
+        try:
+            # assert that the distribution can be installed
+            install_path = "my_install_path"
+            to_install = [ToInstallDist(), ToInstallDist()]
 
-        install.install_from_infos(install=to_install,
-                                         install_path=install_path)
-        for dist in to_install:
-            self.assertEqual(dist.install_called_with, (install_path,))
+            install.install_from_infos(install=to_install,
+                                             install_path=install_path)
+            for dist in to_install:
+                install._install_dist.called_with(install_path)
+        finally:
+            install._install_dist = old_install_dist
+
 
 def test_suite():
     suite = unittest.TestSuite()
diff --git a/distutils2/tests/test_markers.py b/distutils2/tests/test_markers.py
new file mode 100644
--- /dev/null
+++ b/distutils2/tests/test_markers.py
@@ -0,0 +1,69 @@
+"""Tests for distutils2.markers."""
+import os
+import sys
+import platform
+from StringIO import StringIO
+
+from distutils2.markers import interpret
+from distutils2.tests import run_unittest, unittest
+from distutils2.tests.support import LoggingCatcher, WarningsCatcher
+
+
+class MarkersTestCase(LoggingCatcher, WarningsCatcher,
+                      unittest.TestCase):
+
+    def test_interpret(self):
+        sys_platform = sys.platform
+        version = sys.version.split()[0]
+        os_name = os.name
+        platform_version = platform.version()
+        platform_machine = platform.machine()
+
+        self.assertTrue(interpret("sys.platform == '%s'" % sys_platform))
+        self.assertTrue(interpret(
+            "sys.platform == '%s' or python_version == '2.4'" % sys_platform))
+        self.assertTrue(interpret(
+            "sys.platform == '%s' and python_full_version == '%s'" %
+            (sys_platform, version)))
+        self.assertTrue(interpret("'%s' == sys.platform" % sys_platform))
+        self.assertTrue(interpret('os.name == "%s"' % os_name))
+        self.assertTrue(interpret(
+            'platform.version == "%s" and platform.machine == "%s"' %
+            (platform_version, platform_machine)))
+
+        # stuff that need to raise a syntax error
+        ops = ('os.name == os.name', 'os.name == 2', "'2' == '2'",
+               'okpjonon', '', 'os.name ==', 'python_version == 2.4')
+        for op in ops:
+            self.assertRaises(SyntaxError, interpret, op)
+
+        # combined operations
+        OP = 'os.name == "%s"' % os_name
+        AND = ' and '
+        OR = ' or '
+        self.assertTrue(interpret(OP + AND + OP))
+        self.assertTrue(interpret(OP + AND + OP + AND + OP))
+        self.assertTrue(interpret(OP + OR + OP))
+        self.assertTrue(interpret(OP + OR + OP + OR + OP))
+
+        # other operators
+        self.assertTrue(interpret("os.name != 'buuuu'"))
+        self.assertTrue(interpret("python_version > '1.0'"))
+        self.assertTrue(interpret("python_version < '5.0'"))
+        self.assertTrue(interpret("python_version <= '5.0'"))
+        self.assertTrue(interpret("python_version >= '1.0'"))
+        self.assertTrue(interpret("'%s' in os.name" % os_name))
+        self.assertTrue(interpret("'buuuu' not in os.name"))
+        self.assertTrue(interpret(
+            "'buuuu' not in os.name and '%s' in os.name" % os_name))
+
+        # execution context
+        self.assertTrue(interpret('python_version == "0.1"',
+                                  {'python_version': '0.1'}))
+
+
+def test_suite():
+    return unittest.makeSuite(MarkersTestCase)
+
+if __name__ == '__main__':
+    run_unittest(test_suite())
diff --git a/distutils2/tests/test_metadata.py b/distutils2/tests/test_metadata.py
--- a/distutils2/tests/test_metadata.py
+++ b/distutils2/tests/test_metadata.py
@@ -1,10 +1,10 @@
-"""Tests for distutils.command.bdist."""
+"""Tests for distutils2.metadata."""
 import os
 import sys
 import platform
 from StringIO import StringIO
 
-from distutils2.metadata import (DistributionMetadata, _interpret,
+from distutils2.metadata import (DistributionMetadata,
                                  PKG_INFO_PREFERRED_VERSION)
 from distutils2.tests import run_unittest, unittest
 from distutils2.tests.support import LoggingCatcher, WarningsCatcher
@@ -46,55 +46,6 @@
         self.assertRaises(TypeError, DistributionMetadata,
                           PKG_INFO, mapping=m, fileobj=fp)
 
-    def test_interpret(self):
-        sys_platform = sys.platform
-        version = sys.version.split()[0]
-        os_name = os.name
-        platform_version = platform.version()
-        platform_machine = platform.machine()
-
-        self.assertTrue(_interpret("sys.platform == '%s'" % sys_platform))
-        self.assertTrue(_interpret(
-            "sys.platform == '%s' or python_version == '2.4'" % sys_platform))
-        self.assertTrue(_interpret(
-            "sys.platform == '%s' and python_full_version == '%s'" %
-            (sys_platform, version)))
-        self.assertTrue(_interpret("'%s' == sys.platform" % sys_platform))
-        self.assertTrue(_interpret('os.name == "%s"' % os_name))
-        self.assertTrue(_interpret(
-            'platform.version == "%s" and platform.machine == "%s"' %
-            (platform_version, platform_machine)))
-
-        # stuff that need to raise a syntax error
-        ops = ('os.name == os.name', 'os.name == 2', "'2' == '2'",
-               'okpjonon', '', 'os.name ==', 'python_version == 2.4')
-        for op in ops:
-            self.assertRaises(SyntaxError, _interpret, op)
-
-        # combined operations
-        OP = 'os.name == "%s"' % os_name
-        AND = ' and '
-        OR = ' or '
-        self.assertTrue(_interpret(OP + AND + OP))
-        self.assertTrue(_interpret(OP + AND + OP + AND + OP))
-        self.assertTrue(_interpret(OP + OR + OP))
-        self.assertTrue(_interpret(OP + OR + OP + OR + OP))
-
-        # other operators
-        self.assertTrue(_interpret("os.name != 'buuuu'"))
-        self.assertTrue(_interpret("python_version > '1.0'"))
-        self.assertTrue(_interpret("python_version < '5.0'"))
-        self.assertTrue(_interpret("python_version <= '5.0'"))
-        self.assertTrue(_interpret("python_version >= '1.0'"))
-        self.assertTrue(_interpret("'%s' in os.name" % os_name))
-        self.assertTrue(_interpret("'buuuu' not in os.name"))
-        self.assertTrue(_interpret(
-            "'buuuu' not in os.name and '%s' in os.name" % os_name))
-
-        # execution context
-        self.assertTrue(_interpret('python_version == "0.1"',
-                                   {'python_version': '0.1'}))
-
     def test_metadata_read_write(self):
         PKG_INFO = os.path.join(os.path.dirname(__file__), 'PKG-INFO')
         metadata = DistributionMetadata(PKG_INFO)
diff --git a/distutils2/util.py b/distutils2/util.py
--- a/distutils2/util.py
+++ b/distutils2/util.py
@@ -15,6 +15,7 @@
 from copy import copy
 from fnmatch import fnmatchcase
 from ConfigParser import RawConfigParser
+from inspect import getsource
 
 from distutils2.errors import (DistutilsPlatformError, DistutilsFileError,
                                DistutilsByteCompileError, DistutilsExecError)
@@ -674,83 +675,6 @@
     return base, ext
 
 
-def unzip_file(filename, location, flatten=True):
-    """Unzip the file (zip file located at filename) to the destination
-    location"""
-    if not os.path.exists(location):
-        os.makedirs(location)
-    zipfp = open(filename, 'rb')
-    try:
-        zip = zipfile.ZipFile(zipfp)
-        leading = has_leading_dir(zip.namelist()) and flatten
-        for name in zip.namelist():
-            data = zip.read(name)
-            fn = name
-            if leading:
-                fn = split_leading_dir(name)[1]
-            fn = os.path.join(location, fn)
-            dir = os.path.dirname(fn)
-            if not os.path.exists(dir):
-                os.makedirs(dir)
-            if fn.endswith('/') or fn.endswith('\\'):
-                # A directory
-                if not os.path.exists(fn):
-                    os.makedirs(fn)
-            else:
-                fp = open(fn, 'wb')
-                try:
-                    fp.write(data)
-                finally:
-                    fp.close()
-    finally:
-        zipfp.close()
-
-
-def untar_file(filename, location):
-    """Untar the file (tar file located at filename) to the destination
-    location
-    """
-    if not os.path.exists(location):
-        os.makedirs(location)
-    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
-        mode = 'r:gz'
-    elif (filename.lower().endswith('.bz2')
-          or filename.lower().endswith('.tbz')):
-        mode = 'r:bz2'
-    elif filename.lower().endswith('.tar'):
-        mode = 'r'
-    else:
-        mode = 'r:*'
-    tar = tarfile.open(filename, mode)
-    try:
-        leading = has_leading_dir([member.name for member in tar.getmembers()])
-        for member in tar.getmembers():
-            fn = member.name
-            if leading:
-                fn = split_leading_dir(fn)[1]
-            path = os.path.join(location, fn)
-            if member.isdir():
-                if not os.path.exists(path):
-                    os.makedirs(path)
-            else:
-                try:
-                    fp = tar.extractfile(member)
-                except (KeyError, AttributeError):
-                    # Some corrupt tar files seem to produce this
-                    # (specifically bad symlinks)
-                    continue
-                if not os.path.exists(os.path.dirname(path)):
-                    os.makedirs(os.path.dirname(path))
-                destfp = open(path, 'wb')
-                try:
-                    shutil.copyfileobj(fp, destfp)
-                finally:
-                    destfp.close()
-                fp.close()
-    finally:
-        tar.close()
-
-
 def has_leading_dir(paths):
     """Returns true if all the paths have the same leading path name
     (i.e., everything is in one subdirectory in an archive)"""
@@ -1127,3 +1051,117 @@
         """ Issues a call to util.run_2to3. """
         return run_2to3(files, doctests_only, self.fixer_names,
                         self.options, self.explicit)
+
+
+def generate_distutils_kwargs_from_setup_cfg(file='setup.cfg'):
+    """ Distutils2 to distutils1 compatibility util.
+
+        This method uses an existing setup.cfg to generate a dictionary of
+        keywords that can be used by distutils.core.setup(**kwargs).
+
+        :param file:
+            The setup.cfg path.
+        :raises DistutilsFileError:
+            When the setup.cfg file is not found.
+
+    """
+    # We need to declare the following constants here so that it's easier to
+    # generate the setup.py afterwards, using inspect.getsource.
+    D1_D2_SETUP_ARGS = {
+        # D1 name             : (D2_section, D2_name)
+        "name"                : ("metadata",),
+        "version"             : ("metadata",),
+        "author"              : ("metadata",),
+        "author_email"        : ("metadata",),
+        "maintainer"          : ("metadata",),
+        "maintainer_email"    : ("metadata",),
+        "url"                 : ("metadata", "home_page"),
+        "description"         : ("metadata", "summary"),
+        "long_description"    : ("metadata", "description"),
+        "download-url"        : ("metadata",),
+        "classifiers"         : ("metadata", "classifier"),
+        "platforms"           : ("metadata", "platform"), # Needs testing
+        "license"             : ("metadata",),
+        "requires"            : ("metadata", "requires_dist"),
+        "provides"            : ("metadata", "provides_dist"), # Needs testing
+        "obsoletes"           : ("metadata", "obsoletes_dist"), # Needs testing
+    
+        "packages"            : ("files",),
+        "scripts"             : ("files",),
+        "py_modules"          : ("files", "modules"), # Needs testing
+    }
+
+    MULTI_FIELDS = ("classifiers",
+                    "requires",
+                    "platforms",
+                    "packages",
+                    "scripts")
+
+    def has_get_option(config, section, option):
+        if config.has_option(section, option):
+            return config.get(section, option)
+        elif config.has_option(section, option.replace('_', '-')):
+            return config.get(section, option.replace('_', '-'))
+        else:
+            return False
+
+    # The method source code really starts here.
+    config = RawConfigParser()
+    if not os.path.exists(file):
+        raise DistutilsFileError("file '%s' does not exist" %
+                                 os.path.abspath(file))
+    config.read(file)
+
+    kwargs = {}
+    for arg in D1_D2_SETUP_ARGS:
+        if len(D1_D2_SETUP_ARGS[arg]) == 2:
+            # The distutils field name is different than distutils2's.
+            section, option = D1_D2_SETUP_ARGS[arg]
+
+        elif len(D1_D2_SETUP_ARGS[arg]) == 1:
+            # The distutils field name is the same as distutils2's.
+            section = D1_D2_SETUP_ARGS[arg][0]
+            option = arg
+
+        in_cfg_value = has_get_option(config, section, option)
+        if not in_cfg_value:
+            # There is no such option in the setup.cfg
+            if arg == "long_description":
+                filename = has_get_option(config, section, "description_file")
+                print "We have a filename", filename
+                if filename:
+                    in_cfg_value = open(filename).read()
+            else:
+                continue
+
+        if arg in MULTI_FIELDS:
+            # Special behaviour when we have a multi line option
+            if "\n" in in_cfg_value:
+                in_cfg_value = in_cfg_value.strip().split('\n')
+            else:
+                in_cfg_value = list((in_cfg_value,))
+
+        kwargs[arg] = in_cfg_value
+
+    return kwargs
+
+
+def generate_distutils_setup_py():
+    """ Generate a distutils compatible setup.py using an existing setup.cfg.
+
+        :raises DistutilsFileError:
+            When a setup.py already exists.
+    """
+    if os.path.exists("setup.py"):
+        raise DistutilsFileError("A pre existing setup.py file exists")
+
+    handle = open("setup.py", "w")
+    handle.write("# Distutils script using distutils2 setup.cfg to call the\n")
+    handle.write("# distutils.core.setup() with the right args.\n\n\n")
+    handle.write("import os\n")
+    handle.write("from distutils.core import setup\n")
+    handle.write("from ConfigParser import RawConfigParser\n\n")
+    handle.write(getsource(generate_distutils_kwargs_from_setup_cfg))
+    handle.write("\n\nkwargs = generate_distutils_kwargs_from_setup_cfg()\n")
+    handle.write("setup(**kwargs)")
+    handle.close()
diff --git a/docs/source/distutils/apiref.rst b/docs/source/distutils/apiref.rst
--- a/docs/source/distutils/apiref.rst
+++ b/docs/source/distutils/apiref.rst
@@ -1055,6 +1055,13 @@
    Create a file called *filename* and write *contents* (a sequence of strings
    without line terminators) to it.
 
+:mod:`distutils2.metadata` --- Metadata handling
+================================================================
+
+.. module:: distutils2.metadata
+
+.. autoclass:: distutils2.metadata.DistributionMetadata
+   :members:
 
 :mod:`distutils2.util` --- Miscellaneous other utility functions
 ================================================================
diff --git a/docs/source/distutils/examples.rst b/docs/source/distutils/examples.rst
--- a/docs/source/distutils/examples.rst
+++ b/docs/source/distutils/examples.rst
@@ -301,7 +301,7 @@
 :class:`distutils2.dist.DistributionMetadata` class and its
 :func:`read_pkg_file` method::
 
-    >>> from distutils2.dist import DistributionMetadata
+    >>> from distutils2.metadata import DistributionMetadata
     >>> metadata = DistributionMetadata()
     >>> metadata.read_pkg_file(open('distribute-0.6.8-py2.7.egg-info'))
     >>> metadata.name
diff --git a/docs/source/library/distutils2.metadata.rst b/docs/source/library/distutils2.metadata.rst
--- a/docs/source/library/distutils2.metadata.rst
+++ b/docs/source/library/distutils2.metadata.rst
@@ -2,7 +2,9 @@
 Metadata
 ========
 
-Distutils2 provides a :class:`DistributionMetadata` class that can read and
+.. module:: distutils2.metadata
+
+Distutils2 provides a :class:`~distutils2.metadata.DistributionMetadata` class that can read and
 write metadata files. This class is compatible with all metadata versions:
 
 * 1.0: :PEP:`241`
@@ -17,7 +19,7 @@
 Reading metadata
 ================
 
-The :class:`DistributionMetadata` class can be instantiated with the path of
+The :class:`~distutils2.metadata.DistributionMetadata` class can be instantiated with the path of
 the metadata file, and provides a dict-like interface to the values::
 
     >>> from distutils2.metadata import DistributionMetadata
@@ -33,7 +35,7 @@
 
 The fields that supports environment markers can be automatically ignored if
 the object is instantiated using the ``platform_dependent`` option.
-:class:`DistributionMetadata` will interpret in the case the markers and will
+:class:`~distutils2.metadata.DistributionMetadata` will in that case interpret the markers and will
 automatically remove the fields that are not compliant with the running
 environment. Here's an example under Mac OS X. The win32 dependency
 we saw earlier is ignored::
diff --git a/docs/source/library/distutils2.tests.pypi_server.rst b/docs/source/library/distutils2.tests.pypi_server.rst
--- a/docs/source/library/distutils2.tests.pypi_server.rst
+++ b/docs/source/library/distutils2.tests.pypi_server.rst
@@ -77,6 +77,7 @@
         @use_pypi_server()
         def test_somthing(self, server):
             # your tests goes here
+            ...
 
 The decorator will instantiate the server for you, and run and stop it just
 before and after your method call. You also can pass the server initializer,
@@ -85,4 +86,4 @@
     class SampleTestCase(TestCase):
         @use_pypi_server("test_case_name")
         def test_something(self, server):
-            # something
+            ...
diff --git a/docs/source/library/pkgutil.rst b/docs/source/library/pkgutil.rst
--- a/docs/source/library/pkgutil.rst
+++ b/docs/source/library/pkgutil.rst
@@ -4,77 +4,204 @@
 .. module:: pkgutil
    :synopsis: Utilities to support packages.
 
-.. TODO Follow the reST conventions used in the stdlib
+This module provides utilities to manipulate packages: support for the
+Importer protocol defined in :PEP:`302` and implementation of the API
+described in :PEP:`376` to work with the database of installed Python
+distributions.
 
-This module provides functions to manipulate packages, as well as
-the necessary functions to provide support for the "Importer Protocol" as
-described in :PEP:`302` and for working with the database of installed Python
-distributions which is specified in :PEP:`376`. In addition to the functions
-required in :PEP:`376`, back support for older ``.egg`` and ``.egg-info``
-distributions is provided as well. These distributions are represented by the
-class :class:`~distutils2._backport.pkgutil.EggInfoDistribution` and most
-functions provide an extra argument ``use_egg_info`` which indicates if
-they should consider these old styled distributions. This document details
-first the functions and classes available and then presents several use cases.
-
+Import system utilities
+-----------------------
 
 .. function:: extend_path(path, name)
 
-   Extend the search path for the modules which comprise a package. Intended use is
-   to place the following code in a package's :file:`__init__.py`::
+   Extend the search path for the modules which comprise a package.  Intended
+   use is to place the following code in a package's :file:`__init__.py`::
 
       from pkgutil import extend_path
       __path__ = extend_path(__path__, __name__)
 
-   This will add to the package's ``__path__`` all subdirectories of directories on
-   ``sys.path`` named after the package.  This is useful if one wants to distribute
-   different parts of a single logical package as multiple directories.
+   This will add to the package's ``__path__`` all subdirectories of directories
+   on :data:`sys.path` named after the package.  This is useful if one wants to
+   distribute different parts of a single logical package as multiple
+   directories.
 
-   It also looks for :file:`\*.pkg` files beginning where ``*`` matches the *name*
-   argument.  This feature is similar to :file:`\*.pth` files (see the :mod:`site`
-   module for more information), except that it doesn't special-case lines starting
-   with ``import``.  A :file:`\*.pkg` file is trusted at face value: apart from
-   checking for duplicates, all entries found in a :file:`\*.pkg` file are added to
-   the path, regardless of whether they exist on the filesystem.  (This is a
-   feature.)
+   It also looks for :file:`\*.pkg` files beginning where ``*`` matches the
+   *name* argument.  This feature is similar to :file:`\*.pth` files (see the
+   :mod:`site` module for more information), except that it doesn't special-case
+   lines starting with ``import``.  A :file:`\*.pkg` file is trusted at face
+   value: apart from checking for duplicates, all entries found in a
+   :file:`\*.pkg` file are added to the path, regardless of whether they exist
+   on the filesystem.  (This is a feature.)
 
    If the input path is not a list (as is the case for frozen packages) it is
    returned unchanged.  The input path is not modified; an extended copy is
    returned.  Items are only appended to the copy at the end.
 
-   It is assumed that ``sys.path`` is a sequence.  Items of ``sys.path`` that are
-   not strings referring to existing directories are ignored. Unicode items on
-   ``sys.path`` that cause errors when used as filenames may cause this function
-   to raise an exception (in line with :func:`os.path.isdir` behavior).
+   It is assumed that :data:`sys.path` is a sequence.  Items of :data:`sys.path`
+   that are not strings referring to existing directories are ignored. Unicode
+   items on :data:`sys.path` that cause errors when used as filenames may cause
+   this function to raise an exception (in line with :func:`os.path.isdir`
+   behavior).
+
+
+.. class:: ImpImporter(dirname=None)
+
+   :pep:`302` Importer that wraps Python's "classic" import algorithm.
+
+   If *dirname* is a string, a :pep:`302` importer is created that searches that
+   directory.  If *dirname* is ``None``, a :pep:`302` importer is created that
+   searches the current :data:`sys.path`, plus any modules that are frozen or
+   built-in.
+
+   Note that :class:`ImpImporter` does not currently support being used by
+   placement on :data:`sys.meta_path`.
+
+
+.. class:: ImpLoader(fullname, file, filename, etc)
+
+   :pep:`302` Loader that wraps Python's "classic" import algorithm.
+
+
+.. function:: find_loader(fullname)
+
+   Find a :pep:`302` "loader" object for *fullname*.
+
+   If *fullname* contains dots, path must be the containing package's
+   ``__path__``.  Returns ``None`` if the module cannot be found or imported.
+   This function uses :func:`iter_importers`, and is thus subject to the same
+   limitations regarding platform-specific special import locations such as the
+   Windows registry.
+
+
+.. function:: get_importer(path_item)
+
+   Retrieve a :pep:`302` importer for the given *path_item*.
+
+   The returned importer is cached in :data:`sys.path_importer_cache` if it was
+   newly created by a path hook.
+
+   If there is no importer, a wrapper around the basic import machinery is
+   returned.  This wrapper is never inserted into the importer cache (None is
+   inserted instead).
+
+   The cache (or part of it) can be cleared manually if a rescan of
+   :data:`sys.path_hooks` is necessary.
+
+
+.. function:: get_loader(module_or_name)
+
+   Get a :pep:`302` "loader" object for *module_or_name*.
+
+   If the module or package is accessible via the normal import mechanism, a
+   wrapper around the relevant part of that machinery is returned.  Returns
+   ``None`` if the module cannot be found or imported.  If the named module is
+   not already imported, its containing package (if any) is imported, in order
+   to establish the package ``__path__``.
+
+   This function uses :func:`iter_importers`, and is thus subject to the same
+   limitations regarding platform-specific special import locations such as the
+   Windows registry.
+
+
+.. function:: iter_importers(fullname='')
+
+   Yield :pep:`302` importers for the given module name.
+
+   If fullname contains a '.', the importers will be for the package containing
+   fullname, otherwise they will be importers for :data:`sys.meta_path`,
+   :data:`sys.path`, and Python's "classic" import machinery, in that order.  If
+   the named module is in a package, that package is imported as a side effect
+   of invoking this function.
+
+   Non-:pep:`302` mechanisms (e.g. the Windows registry) used by the standard
+   import machinery to find files in alternative locations are partially
+   supported, but are searched *after* :data:`sys.path`.  Normally, these
+   locations are searched *before* :data:`sys.path`, preventing :data:`sys.path`
+   entries from shadowing them.
+
+   For this to cause a visible difference in behaviour, there must be a module
+   or package name that is accessible via both :data:`sys.path` and one of the
+   non-:pep:`302` file system mechanisms.  In this case, the emulation will find
+   the former version, while the builtin import mechanism will find the latter.
+
+   Items of the following types can be affected by this discrepancy:
+   ``imp.C_EXTENSION``, ``imp.PY_SOURCE``, ``imp.PY_COMPILED``,
+   ``imp.PKG_DIRECTORY``.
+
+
+.. function:: iter_modules(path=None, prefix='')
+
+   Yields ``(module_loader, name, ispkg)`` for all submodules on *path*, or, if
+   path is ``None``, all top-level modules on :data:`sys.path`.
+
+   *path* should be either ``None`` or a list of paths to look for modules in.
+
+   *prefix* is a string to output on the front of every module name on output.
+
+
+.. function:: walk_packages(path=None, prefix='', onerror=None)
+
+   Yields ``(module_loader, name, ispkg)`` for all modules recursively on
+   *path*, or, if path is ``None``, all accessible modules.
+
+   *path* should be either ``None`` or a list of paths to look for modules in.
+
+   *prefix* is a string to output on the front of every module name on output.
+
+   Note that this function must import all *packages* (*not* all modules!) on
+   the given *path*, in order to access the ``__path__`` attribute to find
+   submodules.
+
+   *onerror* is a function which gets called with one argument (the name of the
+   package which was being imported) if any exception occurs while trying to
+   import a package.  If no *onerror* function is supplied, :exc:`ImportError`\s
+   are caught and ignored, while all other exceptions are propagated,
+   terminating the search.
+
+   Examples::
+
+      # list all modules python can access
+      walk_packages()
+
+      # list all submodules of ctypes
+      walk_packages(ctypes.__path__, ctypes.__name__ + '.')
+
 
 .. function:: get_data(package, resource)
 
    Get a resource from a package.
 
-   This is a wrapper for the :pep:`302` loader :func:`get_data` API. The package
-   argument should be the name of a package, in standard module format
-   (foo.bar). The resource argument should be in the form of a relative
-   filename, using ``/`` as the path separator. The parent directory name
+   This is a wrapper for the :pep:`302` loader :func:`get_data` API.  The
+   *package* argument should be the name of a package, in standard module format
+   (``foo.bar``).  The *resource* argument should be in the form of a relative
+   filename, using ``/`` as the path separator.  The parent directory name
    ``..`` is not allowed, and nor is a rooted name (starting with a ``/``).
 
-   The function returns a binary string that is the contents of the
-   specified resource.
+   The function returns a binary string that is the contents of the specified
+   resource.
 
    For packages located in the filesystem, which have already been imported,
    this is the rough equivalent of::
 
-       d = os.path.dirname(sys.modules[package].__file__)
-       data = open(os.path.join(d, resource), 'rb').read()
+      d = os.path.dirname(sys.modules[package].__file__)
+      data = open(os.path.join(d, resource), 'rb').read()
 
    If the package cannot be located or loaded, or it uses a :pep:`302` loader
-   which does not support :func:`get_data`, then None is returned.
+   which does not support :func:`get_data`, then ``None`` is returned.
 
 
-API Reference
-=============
+Installed distributions database
+--------------------------------
 
-.. automodule:: distutils2._backport.pkgutil
-   :members:
+Installed Python distributions are represented by instances of
+:class:`~distutils2._backport.pkgutil.Distribution`, or its subclass
+:class:`~distutils2._backport.pkgutil.EggInfoDistribution` (for legacy ``.egg``
+and ``.egg-info`` formats).  Most functions also provide an extra argument
+``use_egg_info`` to take legacy distributions into account.
+
+.. TODO write docs here, don't rely on automodule
+   classes: Distribution and descendents
+   functions: provides, obsoletes, replaces, etc.
 
 Caching
 +++++++
@@ -86,11 +213,10 @@
 :func:`~distutils2._backport.pkgutil.clear_cache`.
 
 
+Examples
+--------
 
-Example Usage
-=============
-
-Print All Information About a Distribution
+Print all information about a distribution
 ++++++++++++++++++++++++++++++++++++++++++
 
 Given a path to a ``.dist-info`` distribution, we shall print out all
@@ -182,7 +308,7 @@
   =====
   * It was installed as a dependency
 
-Find Out Obsoleted Distributions
+Find out obsoleted distributions
 ++++++++++++++++++++++++++++++++
 
 Now, we tackle a different problem: we are interested in finding out
diff --git a/docs/source/setupcfg.rst b/docs/source/setupcfg.rst
--- a/docs/source/setupcfg.rst
+++ b/docs/source/setupcfg.rst
@@ -128,6 +128,8 @@
 
 This section describes the files included in the project.
 
+- **packages_root**: the root directory containing all packages. If not provided,
+  Distutils2 will use the current directory.  *\*optional*
 - **packages**: a list of packages the project includes *\*optional* *\*multi*
 - **modules**: a list of modules the project includes *\*optional* *\*multi*
 - **scripts**: a list of scripts the project includes *\*optional* *\*multi*
@@ -136,6 +138,7 @@
 Example::
 
     [files]
+    packages_root = src
     packages =
             pypi2rpm
             pypi2rpm.command

--
Repository URL: http://hg.python.org/distutils2


More information about the Python-checkins mailing list