[Python-checkins] r53729 - in sandbox/trunk/2to3: Grammar.txt pgen2/grammar.py refactor.py tokenize.py

guido.van.rossum python-checkins at python.org
Sun Feb 11 01:18:47 CET 2007


Author: guido.van.rossum
Date: Sun Feb 11 01:18:46 2007
New Revision: 53729

Modified:
   sandbox/trunk/2to3/Grammar.txt
   sandbox/trunk/2to3/pgen2/grammar.py
   sandbox/trunk/2to3/refactor.py
   sandbox/trunk/2to3/tokenize.py
Log:
Tweaks to the grammar and tokenizer to support parsing Py3k source code.
This has become necessary now that I'm converting the stdlib one fix at a time.


Modified: sandbox/trunk/2to3/Grammar.txt
==============================================================================
--- sandbox/trunk/2to3/Grammar.txt	(original)
+++ sandbox/trunk/2to3/Grammar.txt	Sun Feb 11 01:18:46 2007
@@ -33,13 +33,20 @@
 
 decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
 decorators: decorator+
-funcdef: [decorators] 'def' NAME parameters ':' suite
-parameters: '(' [varargslist] ')'
-varargslist: ((fpdef ['=' test] ',')*
-              ('*' NAME [',' '**' NAME] | '**' NAME) |
-              fpdef ['=' test] (',' fpdef ['=' test])* [','])
-fpdef: NAME | '(' fplist ')'
-fplist: fpdef (',' fpdef)* [',']
+funcdef: [decorators] 'def' NAME parameters ['->' test] ':' suite
+parameters: '(' [typedargslist] ')'
+typedargslist: ((tfpdef ['=' test] ',')*
+                ('*' [tname] (',' tname ['=' test])* [',' '**' tname] | '**' tname)
+                | tfpdef ['=' test] (',' tfpdef ['=' test])* [','])
+tname: NAME [':' test]
+tfpdef: tname | '(' tfplist ')'
+tfplist: tfpdef (',' tfpdef)* [',']
+varargslist: ((vfpdef ['=' test] ',')*
+              ('*' [vname] (',' vname ['=' test])*  [',' '**' vname] | '**' vname)
+              | vfpdef ['=' test] (',' vfpdef ['=' test])* [','])
+vname: NAME
+vfpdef: vname | '(' vfplist ')'
+vfplist: vfpdef (',' vfpdef)* [',']
 
 stmt: simple_stmt | compound_stmt
 simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
@@ -113,19 +120,19 @@
 power: atom trailer* ['**' factor]
 atom: ('(' [yield_expr|testlist_gexp] ')' |
        '[' [listmaker] ']' |
-       '{' [dictmaker] '}' |
+       '{' [dictsetmaker] '}' |
        '`' testlist1 '`' |
-       NAME | NUMBER | STRING+)
+       NAME | NUMBER | STRING+ | '.' '.' '.')
 listmaker: test ( list_for | (',' test)* [','] )
 testlist_gexp: test ( gen_for | (',' test)* [','] )
 lambdef: 'lambda' [varargslist] ':' test
 trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
 subscriptlist: subscript (',' subscript)* [',']
-subscript: '.' '.' '.' | test | [test] ':' [test] [sliceop]
+subscript: test | [test] ':' [test] [sliceop]
 sliceop: ':' [test]
 exprlist: expr (',' expr)* [',']
 testlist: test (',' test)* [',']
-dictmaker: test ':' test (',' test ':' test)* [',']
+dictsetmaker: (test ':' test (',' test ':' test)* [',']) | (test (',' test)* [','])
 
 classdef: 'class' NAME ['(' [testlist] ')'] ':' suite
 

Modified: sandbox/trunk/2to3/pgen2/grammar.py
==============================================================================
--- sandbox/trunk/2to3/pgen2/grammar.py	(original)
+++ sandbox/trunk/2to3/pgen2/grammar.py	Sun Feb 11 01:18:46 2007
@@ -158,6 +158,7 @@
 **= DOUBLESTAREQUAL
 // DOUBLESLASH
 //= DOUBLESLASHEQUAL
+-> RARROW
 """
 
 opmap = {}

Modified: sandbox/trunk/2to3/refactor.py
==============================================================================
--- sandbox/trunk/2to3/refactor.py	(original)
+++ sandbox/trunk/2to3/refactor.py	Sun Feb 11 01:18:46 2007
@@ -20,13 +20,13 @@
 import logging
 
 # Local imports
+import tokenize
 import pytree
 import patcomp
 from pgen2 import driver
 import fixes
 import fixes.macros
 import pygram
-import tokenize
 
 logging.basicConfig(format='%(name)s: %(message)s', level=logging.INFO)
 

Modified: sandbox/trunk/2to3/tokenize.py
==============================================================================
--- sandbox/trunk/2to3/tokenize.py	(original)
+++ sandbox/trunk/2to3/tokenize.py	Sun Feb 11 01:18:46 2007
@@ -36,13 +36,17 @@
 __all__ = [x for x in dir(token) if x[0] != '_'] + ["COMMENT", "tokenize",
            "generate_tokens", "NL", "untokenize"]
 del x
-del token
 
 COMMENT = N_TOKENS
 tok_name[COMMENT] = 'COMMENT'
 NL = N_TOKENS + 1
 tok_name[NL] = 'NL'
-N_TOKENS += 2
+RARROW = N_TOKENS + 2
+token.RARROW = RARROW
+tok_name[RARROW] = 'RARROW'
+N_TOKENS += 3
+
+del token
 
 def group(*choices): return '(' + '|'.join(choices) + ')'
 def any(*choices): return group(*choices) + '*'
@@ -81,7 +85,7 @@
 # longest operators first (e.g., if = came before ==, == would get
 # recognized as two instances of =).
 Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=",
-                 r"//=?",
+                 r"//=?", r"->",
                  r"[+\-*/%&|^=<>]=?",
                  r"~")
 


More information about the Python-checkins mailing list