[Python-checkins] r64651 - sandbox/trunk/2to3/lib2to3/fixes/fix_imports.py
Collin Winter
collinw at gmail.com
Sat Jul 5 03:22:52 CEST 2008
On Tue, Jul 1, 2008 at 7:00 PM, brett.cannon <python-checkins at python.org> wrote:
> Author: brett.cannon
> Date: Wed Jul 2 04:00:11 2008
> New Revision: 64651
>
> Log:
> Update fix_imports for urllib. Had to change the fixer itself to handle modules
> that are split across several renames in 3.0.
So I realize that you already reverted this, but I thought to comment
anyway: this implementation is almost certainly wrong. Any fixer for
this particular PEP 3108 change will need to possibly insert extra
imports to handle the new urllib.request/error/etc modules. I think
this kind of thing should be broken into its own fixer, or at least
its own distinct functionality inside of fix_imports.
Nick Edds is working to speed up fix_imports by stripping out the
member matching stuff, which we don't believe to be actually useful in
the majority of module renamings. IMHO there should be two different
mappings: one for bare-bones module renames (StringIO -> io), and a
separate one for more complex renames, like urllib/urllib2. That will
allow optimizing the former, larger mapping while preserving
flexibility for the rare cases in the latter.
Collin
> Modified:
> sandbox/trunk/2to3/lib2to3/fixes/fix_imports.py
>
> Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_imports.py
> ==============================================================================
> --- sandbox/trunk/2to3/lib2to3/fixes/fix_imports.py (original)
> +++ sandbox/trunk/2to3/lib2to3/fixes/fix_imports.py Wed Jul 2 04:00:11 2008
> @@ -272,6 +272,35 @@
> 'commands': ('subprocess', ['getstatusoutput', 'getoutput']),
> 'UserString' : ('collections', ['UserString']),
> 'UserList' : ('collections', ['UserList']),
> + 'urllib' : (
> + 'urllib.request',
> + ['URLOpener', 'FancyURLOpener', 'urlretrieve',
> + '_urlopener', 'urlcleanup'],
> + 'urllib.parse',
> + ['quote', 'quote_plus', 'unquote', 'unquote_plus',
> + 'urlencode', 'pathname2url', 'url2pathname'],
> + 'urllib.error', ['ContentTooShortError'],),
> + 'urllib2' : (
> + 'urllib.request',
> + ['urlopen', 'install_opener', 'build_opener',
> + 'Request', 'OpenerDirector', 'BaseHandler',
> + 'HTTPDefaultErrorHandler', 'HTTPRedirectHandler',
> + 'HTTPCookieProcessor', 'ProxyHandler',
> + 'HTTPPasswordMgr',
> + 'HTTPPasswordMgrWithDefaultRealm',
> + 'AbstractBasicAuthHandler',
> + 'HTTPBasicAuthHandler', 'ProxyBasicAuthHandler',
> + 'AbstractDigestAuthHandler',
> + 'HTTPDigestAuthHander', 'ProxyDigestAuthHandler',
> + 'HTTPHandler', 'HTTPSHandler', 'FileHandler',
> + 'FTPHandler', 'CacheFTPHandler',
> + 'UnknownHandler'],
> + 'urllib.error', ['URLError', 'HTTPError'],),
> + 'urlparse' : ('urllib.parse',
> + ['urlparse', 'urlunparse', 'urlsplit',
> + 'urlunsplit', 'urljoin', 'urldefrag',
> + 'ParseResult', 'SplitResult']),
> + 'robotparser' : ('urllib.robotparser', ['RobotFileParser']),
> }
>
>
> @@ -281,24 +310,26 @@
>
> def build_pattern():
> bare = set()
> - for old_module, (new_module, members) in MAPPING.items():
> - bare.add(old_module)
> - bare.update(members)
> - members = alternates(members)
> - yield """import_name< 'import' (module=%r
> - | dotted_as_names< any* module=%r any* >) >
> - """ % (old_module, old_module)
> - yield """import_from< 'from' module_name=%r 'import'
> - ( %s | import_as_name< %s 'as' any > |
> - import_as_names< any* >) >
> - """ % (old_module, members, members)
> - yield """import_from< 'from' module_name=%r 'import' star='*' >
> - """ % old_module
> - yield """import_name< 'import'
> - dotted_as_name< module_name=%r 'as' any > >
> - """ % old_module
> - yield """power< module_name=%r trailer< '.' %s > any* >
> - """ % (old_module, members)
> + for old_module, changes in MAPPING.items():
> + changes_iter = iter(changes)
> + for new_module, members in zip(changes_iter, changes_iter):
> + bare.add(old_module)
> + bare.update(members)
> + members = alternates(members)
> + yield """import_name< 'import' (module=%r
> + | dotted_as_names< any* module=%r any* >) >
> + """ % (old_module, old_module)
> + yield """import_from< 'from' module_name=%r 'import'
> + ( %s | import_as_name< %s 'as' any > |
> + import_as_names< any* >) >
> + """ % (old_module, members, members)
> + yield """import_from< 'from' module_name=%r 'import' star='*' >
> + """ % old_module
> + yield """import_name< 'import'
> + dotted_as_name< module_name=%r 'as' any > >
> + """ % old_module
> + yield """power< module_name=%r trailer< '.' %s > any* >
> + """ % (old_module, members)
> yield """bare_name=%s""" % alternates(bare)
>
>
> @@ -328,16 +359,19 @@
> star = results.get("star")
>
> if import_mod or mod_name:
> - new_name, members = MAPPING[(import_mod or mod_name).value]
> -
> - if import_mod:
> - self.replace[import_mod.value] = new_name
> - import_mod.replace(Name(new_name, prefix=import_mod.get_prefix()))
> - elif mod_name:
> - if star:
> - self.cannot_convert(node, "Cannot handle star imports.")
> - else:
> - mod_name.replace(Name(new_name, prefix=mod_name.get_prefix()))
> + changes = MAPPING[(import_mod or mod_name).value]
> + changes_iter = iter(changes)
> + for new_name, members in zip(changes_iter, changes_iter):
> + if import_mod:
> + self.replace[import_mod.value] = new_name
> + import_mod.replace(Name(new_name,
> + prefix=import_mod.get_prefix()))
> + elif mod_name:
> + if star:
> + self.cannot_convert(node, "Cannot handle star imports.")
> + else:
> + mod_name.replace(Name(new_name,
> + prefix=mod_name.get_prefix()))
> elif bare_name:
> bare_name = bare_name[0]
> new_name = self.replace.get(bare_name.value)
> _______________________________________________
> Python-checkins mailing list
> Python-checkins at python.org
> http://mail.python.org/mailman/listinfo/python-checkins
>
More information about the Python-checkins
mailing list