[Python-checkins] distutils2: removes some trailing spaces
tarek.ziade
python-checkins at python.org
Sun Jul 4 11:48:40 CEST 2010
tarek.ziade pushed be0c887b2eb1 to distutils2:
http://hg.python.org/distutils2/rev/be0c887b2eb1
changeset: 330:be0c887b2eb1
user: Alexis Metaireau <ametaireau at gmail.com>
date: Thu Jul 01 13:37:19 2010 +0200
summary: removes some trailing spaces
files: src/distutils2/pypi/dist.py, src/distutils2/pypi/simple.py, src/distutils2/tests/test_pypi_dist.py, src/distutils2/tests/test_pypi_simple.py
diff --git a/src/distutils2/pypi/dist.py b/src/distutils2/pypi/dist.py
--- a/src/distutils2/pypi/dist.py
+++ b/src/distutils2/pypi/dist.py
@@ -34,8 +34,8 @@
"""Build a Distribution from a url archive (egg or zip or tgz).
:param url: complete url of the distribution
- :param probable_dist_name: A probable name of the distribution.
- :param is_external: Tell if the url commes from an index or from
+ :param probable_dist_name: A probable name of the distribution.
+ :param is_external: Tell if the url comes from an index or from
an external URL.
"""
# if the url contains a md5 hash, get it.
@@ -57,10 +57,10 @@
name, version = split_archive_name(archive_name)
if extension_matched is True:
- return PyPIDistribution(name, version, url=url, hashname="md5",
+ return PyPIDistribution(name, version, url=url, hashname="md5",
hashval=md5_hash, is_external=is_external)
- def __init__(self, name, version, type=None, url=None, hashname=None,
+ def __init__(self, name, version, type=None, url=None, hashname=None,
hashval=None, is_external=True):
"""Create a new instance of PyPIDistribution.
@@ -71,7 +71,7 @@
:param hashname: the name of the hash we want to use. Refer to the
hashlib.new documentation for more information.
:param hashval: the hash value.
- :param is_external: we need to know if the provided url comes from an
+ :param is_external: we need to know if the provided url comes from an
index browsing, or from an external resource.
"""
@@ -84,13 +84,13 @@
# We store urls in dict, because we need to have a bit more informations
# than the simple URL. It will be used later to find the good url to
# use.
- # We have two _url* attributes: _url and _urls. _urls contains a list of
- # dict for the different urls, and _url contains the choosen url, in
+ # We have two _url* attributes: _url and _urls. _urls contains a list of
+ # dict for the different urls, and _url contains the chosen url, in
# order to dont make the selection process multiple times.
self._urls = []
self._url = None
self.add_url(url, hashname, hashval, is_external)
-
+
def add_url(self, url, hashname=None, hashval=None, is_external=True):
"""Add a new url to the list of urls"""
if hashname is not None:
@@ -118,7 +118,7 @@
if len(self._urls) > 1:
internals_urls = [u for u in self._urls \
if u['is_external'] == False]
- if len(internals_urls) >= 1:
+ if len(internals_urls) >= 1:
self._url = internals_urls[0]
if self._url is None:
self._url = self._urls[0]
@@ -136,7 +136,7 @@
if self.location is None:
url = self.url['url']
archive_name = urlparse.urlparse(url)[2].split('/')[-1]
- filename, headers = urllib.urlretrieve(url,
+ filename, headers = urllib.urlretrieve(url,
path + "/" + archive_name)
self.location = filename
self._check_md5(filename)
@@ -220,7 +220,7 @@
def append(self, o):
"""Append a new distribution to the list.
-
+
If a distribution with the same name and version exists, just grab the
URL informations and add a new new url for the existing one.
"""
@@ -231,7 +231,7 @@
dist.add_url(**o.url)
else:
super(PyPIDistributions, self).append(o)
-
+
def split_archive_name(archive_name, probable_name=None):
"""Split an archive name into two parts: name and version.
diff --git a/src/distutils2/pypi/simple.py b/src/distutils2/pypi/simple.py
--- a/src/distutils2/pypi/simple.py
+++ b/src/distutils2/pypi/simple.py
@@ -21,7 +21,7 @@
# -- Constants -----------------------------------------------
PYPI_DEFAULT_INDEX_URL = "http://pypi.python.org/simple/"
-DEFAULT_HOSTS = ("*",)
+DEFAULT_HOSTS = ("*",)
SOCKET_TIMEOUT = 15
USER_AGENT = "Python-urllib/%s distutils2/%s" % (
sys.version[:3], __distutils2_version__)
@@ -66,10 +66,10 @@
"""Class constructor.
:param index_url: the url of the simple index to search on.
- :param follow_externals: tell if following external links is needed or
+ :param follow_externals: tell if following external links is needed or
not. Default is False.
:param hosts: a list of hosts allowed to be processed while using
- follow_externals=True. Default behavior is to follow all
+ follow_externals=True. Default behavior is to follow all
hosts.
:param mirrors: a list of mirrors to check out if problems occurs while
working with the one given in "url"
@@ -107,8 +107,8 @@
requirements.
:param requirements: A project name and it's distribution, using
- version specifiers, as described in PEP345.
- :type requirements: You can pass either a version.VersionPredicate
+ version specifiers, as described in PEP345.
+ :type requirements: You can pass either a version.VersionPredicate
or a string.
"""
requirements = self._get_version_predicate(requirements)
@@ -134,7 +134,7 @@
Returns the complete absolute path to the downloaded archive.
- :param requirements: The same as the find attribute of `find`.
+ :param requirements: The same as the find attribute of `find`.
"""
return self.get(requirements).download(path=temp_path)
@@ -145,10 +145,10 @@
if isinstance(requirements, str):
requirements = VersionPredicate(requirements)
return requirements
-
+
@property
def index_url(self):
- return self._index_urls[self._current_index_url]
+ return self._index_urls[self._current_index_url]
def _switch_to_next_mirror(self):
"""Switch to the next mirror (eg. point self.index_url to the next
@@ -160,7 +160,7 @@
self._current_index_url = self._current_index_url + 1
else:
raise UnableToDownload("All mirrors fails")
-
+
def _is_browsable(self, url):
"""Tell if the given URL can be browsed or not.
@@ -207,14 +207,14 @@
def _process_url(self, url, project_name=None, follow_links=True):
"""Process an url and search for distributions packages.
- For each URL found, if it's a download, creates a PyPIdistribution
+ For each URL found, if it's a download, creates a PyPIdistribution
object. If it's a homepage and we can follow links, process it too.
:param url: the url to process
:param project_name: the project name we are searching for.
:param follow_links: Do not want to follow links more than from one
- level. This parameter tells if we want to follow
- the links we find (eg. run recursively this
+ level. This parameter tells if we want to follow
+ the links we find (eg. run recursively this
method on it)
"""
f = self._open_url(url)
@@ -233,7 +233,7 @@
if self._is_browsable(link) and follow_links:
self._process_url(link, project_name,
follow_links=False)
-
+
def _get_link_matcher(self, url):
"""Returns the right link matcher function of the given url
"""
@@ -258,7 +258,7 @@
self._htmldecode(match.group(1)))
if 'download' in rels or self._is_browsable(url):
# yield a list of (url, is_download)
- yield (urlparse.urljoin(base_url, url),
+ yield (urlparse.urljoin(base_url, url),
'download' in rels)
def _default_link_matcher(self, content, base_url):
@@ -279,7 +279,7 @@
url = self.index_url + name + "/"
self._process_url(url, name)
except DownloadError:
- # if an error occurs, try with the next index_url
+ # if an error occurs, try with the next index_url
# (provided by the mirrors)
self._switch_to_next_mirror()
self._distributions.clear()
diff --git a/src/distutils2/tests/test_pypi_dist.py b/src/distutils2/tests/test_pypi_dist.py
--- a/src/distutils2/tests/test_pypi_dist.py
+++ b/src/distutils2/tests/test_pypi_dist.py
@@ -145,7 +145,7 @@
# should be ok
Dist("FooBar", "0.1", hashname="md5", hashval="value")
- self.assertRaises(UnsupportedHashName, Dist, "FooBar", "0.1",
+ self.assertRaises(UnsupportedHashName, Dist, "FooBar", "0.1",
hashname="invalid_hashname", hashval="value")
diff --git a/src/distutils2/tests/test_pypi_simple.py b/src/distutils2/tests/test_pypi_simple.py
--- a/src/distutils2/tests/test_pypi_simple.py
+++ b/src/distutils2/tests/test_pypi_simple.py
@@ -136,7 +136,7 @@
self.assertTrue(index._is_browsable("pypi.test.tld/a/path"))
# specify a list of hosts we want to allow
- index = simple.SimpleIndex(follow_externals=True,
+ index = simple.SimpleIndex(follow_externals=True,
hosts=("*.test.tld",))
self.assertFalse(index._is_browsable("http://an-external.link/path"))
self.assertTrue(index._is_browsable("http://pypi.test.tld/a/path"))
@@ -209,13 +209,13 @@
# process the pages
index = self._get_simple_index(server, follow_externals=True)
index.find("foobar")
- # now it should have processed only pages with links rel="download"
+ # now it should have processed only pages with links rel="download"
# and rel="homepage"
- self.assertIn("%s/simple/foobar/" % server.full_address,
+ self.assertIn("%s/simple/foobar/" % server.full_address,
index._processed_urls) # it's the simple index page
- self.assertIn("%s/external/homepage.html" % server.full_address,
+ self.assertIn("%s/external/homepage.html" % server.full_address,
index._processed_urls) # the external homepage is rel="homepage"
- self.assertNotIn("%s/external/nonrel.html" % server.full_address,
+ self.assertNotIn("%s/external/nonrel.html" % server.full_address,
index._processed_urls) # this link contains no rel=*
self.assertNotIn("%s/unrelated-0.2.tar.gz" % server.full_address,
index._processed_urls) # linked from simple index (no rel)
@@ -229,22 +229,22 @@
server = PyPIServer("foo_bar_baz")
mirror = PyPIServer("foo_bar_baz")
mirror.start() # we dont start the server here
-
+
try:
# create the index using both servers
- index = simple.SimpleIndex(server.full_address + "/simple/",
+ index = simple.SimpleIndex(server.full_address + "/simple/",
hosts=('*',), timeout=1, # set the timeout to 1s for the tests
mirrors=[mirror.full_address + "/simple/",])
-
+
# this should not raise a timeout
self.assertEqual(4, len(index.find("foo")))
finally:
mirror.stop()
-
+
def test_simple_link_matcher(self):
"""Test that the simple link matcher yields the right links"""
index = simple.SimpleIndex(follow_externals=False)
-
+
# Here, we define:
# 1. one link that must be followed, cause it's a download one
# 2. one link that must *not* be followed, cause the is_browsable
@@ -262,7 +262,7 @@
# Test that the simple link matcher yield the good links.
generator = index._simple_link_matcher(content, index.index_url)
self.assertEqual(('http://dl-link1', True), generator.next())
- self.assertEqual(('%stest' % index.index_url, False),
+ self.assertEqual(('%stest' % index.index_url, False),
generator.next())
self.assertRaises(StopIteration, generator.next)
@@ -271,7 +271,7 @@
generator = index._simple_link_matcher(content, index.index_url)
self.assertEqual(('http://dl-link1', True), generator.next())
self.assertEqual(('http://dl-link2', False), generator.next())
- self.assertEqual(('%stest' % index.index_url, False),
+ self.assertEqual(('%stest' % index.index_url, False),
generator.next())
self.assertRaises(StopIteration, generator.next)
--
Repository URL: http://hg.python.org/distutils2
More information about the Python-checkins
mailing list