[Python-checkins] bpo-30406: Make async and await proper keywords (#1669)

Yury Selivanov webhook-mailer at python.org
Thu Oct 5 23:24:51 EDT 2017


https://github.com/python/cpython/commit/ac317700ce7439e38a8b420218d9a5035bba92ed
commit: ac317700ce7439e38a8b420218d9a5035bba92ed
branch: master
author: Jelle Zijlstra <jelle.zijlstra at gmail.com>
committer: Yury Selivanov <yury at magic.io>
date: 2017-10-05T23:24:46-04:00
summary:

bpo-30406: Make async and await proper keywords (#1669)

Per PEP 492, 'async' and 'await' should become proper keywords in 3.7.

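In practical terms: code that used 'async' or 'await' as ordinary
identifiers compiled with a DeprecationWarning on 3.6 and is a hard
error from this commit on. A minimal sketch of the new behavior,
assuming a 3.7 interpreter:

    try:
        compile("async = 1", "<test>", "exec")
    except SyntaxError:
        print("'async' is now a reserved keyword")
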
files:
A Misc/NEWS.d/next/Core and Builtins/2017-07-20-22-03-44.bpo-30406._kr47t.rst
M Doc/library/token.rst
M Doc/tools/extensions/pyspecific.py
M Grammar/Grammar
M Include/graminit.h
M Include/token.h
M Lib/keyword.py
M Lib/lib2to3/Grammar.txt
M Lib/lib2to3/pgen2/token.py
M Lib/lib2to3/pgen2/tokenize.py
M Lib/lib2to3/tests/test_parser.py
M Lib/pydoc.py
M Lib/symbol.py
M Lib/test/test_asyncio/test_tasks.py
M Lib/test/test_coroutines.py
M Lib/test/test_parser.py
M Lib/test/test_tokenize.py
M Lib/tokenize.py
M Parser/tokenizer.c
M Parser/tokenizer.h
M Python/ast.c
M Python/graminit.c

diff --git a/Doc/library/token.rst b/Doc/library/token.rst
index b7ca9dbca72..373991027e4 100644
--- a/Doc/library/token.rst
+++ b/Doc/library/token.rst
@@ -98,8 +98,6 @@ The token constants are:
           RARROW
           ELLIPSIS
           OP
-          AWAIT
-          ASYNC
           ERRORTOKEN
           N_TOKENS
           NT_OFFSET
@@ -129,9 +127,11 @@ the :mod:`tokenize` module.
 
 
 .. versionchanged:: 3.5
-   Added :data:`AWAIT` and :data:`ASYNC` tokens. Starting with
-   Python 3.7, "async" and "await" will be tokenized as :data:`NAME`
-   tokens, and :data:`AWAIT` and :data:`ASYNC` will be removed.
+   Added :data:`AWAIT` and :data:`ASYNC` tokens.
 
 .. versionchanged:: 3.7
    Added :data:`COMMENT`, :data:`NL` and :data:`ENCODING` tokens.
+
+.. versionchanged:: 3.7
+   Removed :data:`AWAIT` and :data:`ASYNC` tokens. "async" and "await" are
+   now tokenized as :data:`NAME` tokens.
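
The tokenize module follows suit: there are no dedicated ASYNC/AWAIT
token types any more, and the keywords arrive as plain NAME tokens. A
minimal check, assuming a 3.7 interpreter:

    import io, token, tokenize

    toks = list(tokenize.tokenize(io.BytesIO(b"async def f(): pass\n").readline))
    # toks[0] is the ENCODING token; the keyword itself is a NAME.
    assert toks[1].type == token.NAME and toks[1].string == "async"
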
diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py
index e8c88760a87..00acd4f55b8 100644
--- a/Doc/tools/extensions/pyspecific.py
+++ b/Doc/tools/extensions/pyspecific.py
@@ -272,9 +272,9 @@ def run(self):
 # Support for building "topic help" for pydoc
 
 pydoc_topic_labels = [
-    'assert', 'assignment', 'atom-identifiers', 'atom-literals',
-    'attribute-access', 'attribute-references', 'augassign', 'binary',
-    'bitwise', 'bltin-code-objects', 'bltin-ellipsis-object',
+    'assert', 'assignment', 'async', 'atom-identifiers', 'atom-literals',
+    'attribute-access', 'attribute-references', 'augassign', 'await',
+    'binary', 'bitwise', 'bltin-code-objects', 'bltin-ellipsis-object',
     'bltin-null-object', 'bltin-type-objects', 'booleans',
     'break', 'callable-types', 'calls', 'class', 'comparisons', 'compound',
     'context-managers', 'continue', 'conversions', 'customization', 'debugger',
diff --git a/Grammar/Grammar b/Grammar/Grammar
index 90582434bee..7d3dd0b86dc 100644
--- a/Grammar/Grammar
+++ b/Grammar/Grammar
@@ -16,7 +16,7 @@ decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
 decorators: decorator+
 decorated: decorators (classdef | funcdef | async_funcdef)
 
-async_funcdef: ASYNC funcdef
+async_funcdef: 'async' funcdef
 funcdef: 'def' NAME parameters ['->' test] ':' suite
 
 parameters: '(' [typedargslist] ')'
@@ -68,7 +68,7 @@ nonlocal_stmt: 'nonlocal' NAME (',' NAME)*
 assert_stmt: 'assert' test [',' test]
 
 compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt
-async_stmt: ASYNC (funcdef | with_stmt | for_stmt)
+async_stmt: 'async' (funcdef | with_stmt | for_stmt)
 if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
 while_stmt: 'while' test ':' suite ['else' ':' suite]
 for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite]
@@ -103,7 +103,7 @@ arith_expr: term (('+'|'-') term)*
 term: factor (('*'|'@'|'/'|'%'|'//') factor)*
 factor: ('+'|'-'|'~') factor | power
 power: atom_expr ['**' factor]
-atom_expr: [AWAIT] atom trailer*
+atom_expr: ['await'] atom trailer*
 atom: ('(' [yield_expr|testlist_comp] ')' |
        '[' [testlist_comp] ']' |
        '{' [dictorsetmaker] '}' |
@@ -139,7 +139,8 @@ argument: ( test [comp_for] |
             '*' test )
 
 comp_iter: comp_for | comp_if
-comp_for: [ASYNC] 'for' exprlist 'in' or_test [comp_iter]
+sync_comp_for: 'for' exprlist 'in' or_test [comp_iter]
+comp_for: ['async'] sync_comp_for
 comp_if: 'if' test_nocond [comp_iter]
 
 # not used in grammar, but may appear in "node" passed from Parser to Compiler
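
Because ASYNC is no longer a token, the grammar spells the keyword out
as 'async'. To keep the comprehension rules unambiguous, comp_for is
factored into an optional 'async' prefix plus a new sync_comp_for rule;
that new nonterminal is what renumbers everything after it in
graminit.h and symbol.py below. The AST shape of comprehensions is
unchanged, as this sketch (assuming 3.7) illustrates:

    import ast

    mod = ast.parse("async def f():\n    return [x async for x in xs]")
    comp = mod.body[0].body[0].value.generators[0]
    assert comp.is_async == 1  # same comprehension node as before
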
diff --git a/Include/graminit.h b/Include/graminit.h
index e9b4a938595..bdfe821ad71 100644
--- a/Include/graminit.h
+++ b/Include/graminit.h
@@ -81,8 +81,9 @@
 #define arglist 334
 #define argument 335
 #define comp_iter 336
-#define comp_for 337
-#define comp_if 338
-#define encoding_decl 339
-#define yield_expr 340
-#define yield_arg 341
+#define sync_comp_for 337
+#define comp_for 338
+#define comp_if 339
+#define encoding_decl 340
+#define yield_expr 341
+#define yield_arg 342
diff --git a/Include/token.h b/Include/token.h
index b28830b8b40..cd1cd00f09c 100644
--- a/Include/token.h
+++ b/Include/token.h
@@ -9,77 +9,75 @@ extern "C" {
 
 #undef TILDE   /* Prevent clash of our definition with system macro. Ex AIX, ioctl.h */
 
-#define ENDMARKER	0
-#define NAME		1
-#define NUMBER		2
-#define STRING		3
-#define NEWLINE		4
-#define INDENT		5
-#define DEDENT		6
-#define LPAR		7
-#define RPAR		8
-#define LSQB		9
-#define RSQB		10
-#define COLON		11
-#define COMMA		12
-#define SEMI		13
-#define PLUS		14
-#define MINUS		15
-#define STAR		16
-#define SLASH		17
-#define VBAR		18
-#define AMPER		19
-#define LESS		20
-#define GREATER		21
-#define EQUAL		22
-#define DOT		23
-#define PERCENT		24
-#define LBRACE		25
-#define RBRACE		26
-#define EQEQUAL		27
-#define NOTEQUAL	28
-#define LESSEQUAL	29
-#define GREATEREQUAL	30
-#define TILDE		31
-#define CIRCUMFLEX	32
-#define LEFTSHIFT	33
-#define RIGHTSHIFT	34
-#define DOUBLESTAR	35
-#define PLUSEQUAL	36
-#define MINEQUAL	37
-#define STAREQUAL	38
-#define SLASHEQUAL	39
-#define PERCENTEQUAL	40
-#define AMPEREQUAL	41
-#define VBAREQUAL	42
-#define CIRCUMFLEXEQUAL	43
-#define LEFTSHIFTEQUAL	44
-#define RIGHTSHIFTEQUAL	45
-#define DOUBLESTAREQUAL	46
-#define DOUBLESLASH	47
+#define ENDMARKER       0
+#define NAME            1
+#define NUMBER          2
+#define STRING          3
+#define NEWLINE         4
+#define INDENT          5
+#define DEDENT          6
+#define LPAR            7
+#define RPAR            8
+#define LSQB            9
+#define RSQB            10
+#define COLON           11
+#define COMMA           12
+#define SEMI            13
+#define PLUS            14
+#define MINUS           15
+#define STAR            16
+#define SLASH           17
+#define VBAR            18
+#define AMPER           19
+#define LESS            20
+#define GREATER         21
+#define EQUAL           22
+#define DOT             23
+#define PERCENT         24
+#define LBRACE          25
+#define RBRACE          26
+#define EQEQUAL         27
+#define NOTEQUAL        28
+#define LESSEQUAL       29
+#define GREATEREQUAL    30
+#define TILDE           31
+#define CIRCUMFLEX      32
+#define LEFTSHIFT       33
+#define RIGHTSHIFT      34
+#define DOUBLESTAR      35
+#define PLUSEQUAL       36
+#define MINEQUAL        37
+#define STAREQUAL       38
+#define SLASHEQUAL      39
+#define PERCENTEQUAL    40
+#define AMPEREQUAL      41
+#define VBAREQUAL       42
+#define CIRCUMFLEXEQUAL 43
+#define LEFTSHIFTEQUAL  44
+#define RIGHTSHIFTEQUAL 45
+#define DOUBLESTAREQUAL 46
+#define DOUBLESLASH     47
 #define DOUBLESLASHEQUAL 48
 #define AT              49
-#define ATEQUAL		50
+#define ATEQUAL         50
 #define RARROW          51
 #define ELLIPSIS        52
 /* Don't forget to update the table _PyParser_TokenNames in tokenizer.c! */
-#define OP		53
-#define AWAIT		54
-#define ASYNC		55
-#define ERRORTOKEN	56
+#define OP              53
+#define ERRORTOKEN      54
 /* These aren't used by the C tokenizer but are needed for tokenize.py */
-#define COMMENT		57
-#define NL		58
-#define ENCODING		59
-#define N_TOKENS	60
+#define COMMENT         55
+#define NL              56
+#define ENCODING        57
+#define N_TOKENS        58
 
 /* Special definitions for cooperation with parser */
 
-#define NT_OFFSET		256
+#define NT_OFFSET               256
 
-#define ISTERMINAL(x)		((x) < NT_OFFSET)
-#define ISNONTERMINAL(x)	((x) >= NT_OFFSET)
-#define ISEOF(x)		((x) == ENDMARKER)
+#define ISTERMINAL(x)           ((x) < NT_OFFSET)
+#define ISNONTERMINAL(x)        ((x) >= NT_OFFSET)
+#define ISEOF(x)                ((x) == ENDMARKER)
 
 
 PyAPI_DATA(const char *) _PyParser_TokenNames[]; /* Token names */
diff --git a/Lib/keyword.py b/Lib/keyword.py
index 6e1e882a91e..431991dcf4a 100755
--- a/Lib/keyword.py
+++ b/Lib/keyword.py
@@ -20,6 +20,8 @@
         'and',
         'as',
         'assert',
+        'async',
+        'await',
         'break',
         'class',
         'continue',
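
With the two entries added, keyword.kwlist stays alphabetically sorted
and keyword.iskeyword() recognizes them; for example:

    import keyword

    assert keyword.iskeyword("async") and keyword.iskeyword("await")
    assert "async" in keyword.kwlist  # between 'assert' and 'break'
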
diff --git a/Lib/lib2to3/Grammar.txt b/Lib/lib2to3/Grammar.txt
index ded032522ba..0bdfcafcf3c 100644
--- a/Lib/lib2to3/Grammar.txt
+++ b/Lib/lib2to3/Grammar.txt
@@ -34,7 +34,7 @@ eval_input: testlist NEWLINE* ENDMARKER
 decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
 decorators: decorator+
 decorated: decorators (classdef | funcdef | async_funcdef)
-async_funcdef: ASYNC funcdef
+async_funcdef: 'async' funcdef
 funcdef: 'def' NAME parameters ['->' test] ':' suite
 parameters: '(' [typedargslist] ')'
 typedargslist: ((tfpdef ['=' test] ',')*
@@ -85,7 +85,7 @@ exec_stmt: 'exec' expr ['in' test [',' test]]
 assert_stmt: 'assert' test [',' test]
 
 compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt
-async_stmt: ASYNC (funcdef | with_stmt | for_stmt)
+async_stmt: 'async' (funcdef | with_stmt | for_stmt)
 if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
 while_stmt: 'while' test ':' suite ['else' ':' suite]
 for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite]
@@ -124,7 +124,7 @@ shift_expr: arith_expr (('<<'|'>>') arith_expr)*
 arith_expr: term (('+'|'-') term)*
 term: factor (('*'|'@'|'/'|'%'|'//') factor)*
 factor: ('+'|'-'|'~') factor | power
-power: [AWAIT] atom trailer* ['**' factor]
+power: ['await'] atom trailer* ['**' factor]
 atom: ('(' [yield_expr|testlist_gexp] ')' |
        '[' [listmaker] ']' |
        '{' [dictsetmaker] '}' |
@@ -161,7 +161,7 @@ argument: ( test [comp_for] |
 	    star_expr )
 
 comp_iter: comp_for | comp_if
-comp_for: [ASYNC] 'for' exprlist 'in' or_test [comp_iter]
+comp_for: ['async'] 'for' exprlist 'in' or_test [comp_iter]
 comp_if: 'if' old_test [comp_iter]
 
 # As noted above, testlist_safe extends the syntax allowed in list
@@ -180,7 +180,7 @@ comp_if: 'if' old_test [comp_iter]
 #
 # See https://bugs.python.org/issue27494
 old_comp_iter: old_comp_for | old_comp_if
-old_comp_for: [ASYNC] 'for' exprlist 'in' testlist_safe [old_comp_iter]
+old_comp_for: ['async'] 'for' exprlist 'in' testlist_safe [old_comp_iter]
 old_comp_if: 'if' old_test [old_comp_iter]
 
 testlist1: test (',' test)*
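
Note that lib2to3 keeps a single comp_for rule with an optional 'async'
prefix instead of adopting the sync_comp_for split; its grammar only
has to accept the construct, not feed CPython's AST builder. A sketch
of parsing with the updated grammar (API as in 3.7's lib2to3):

    from lib2to3 import pygram, pytree
    from lib2to3.pgen2 import driver

    d = driver.Driver(pygram.python_grammar_no_print_statement,
                      convert=pytree.convert)
    tree = d.parse_string("async def f():\n    await x\n")
    print(type(tree))  # a pytree node; 'async'/'await' parsed as keywords
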
diff --git a/Lib/lib2to3/pgen2/token.py b/Lib/lib2to3/pgen2/token.py
index 1a679554d2d..7599396611b 100755
--- a/Lib/lib2to3/pgen2/token.py
+++ b/Lib/lib2to3/pgen2/token.py
@@ -62,10 +62,8 @@
 COMMENT = 53
 NL = 54
 RARROW = 55
-AWAIT = 56
-ASYNC = 57
-ERRORTOKEN = 58
-N_TOKENS = 59
+ERRORTOKEN = 56
+N_TOKENS = 57
 NT_OFFSET = 256
 #--end constants--
 
diff --git a/Lib/lib2to3/pgen2/tokenize.py b/Lib/lib2to3/pgen2/tokenize.py
index 45afc5f4e53..14560e4fddf 100644
--- a/Lib/lib2to3/pgen2/tokenize.py
+++ b/Lib/lib2to3/pgen2/tokenize.py
@@ -234,7 +234,7 @@ def compat(self, token, iterable):
         for tok in iterable:
             toknum, tokval = tok[:2]
 
-            if toknum in (NAME, NUMBER, ASYNC, AWAIT):
+            if toknum in (NAME, NUMBER):
                 tokval += ' '
 
             if toknum == INDENT:
@@ -380,12 +380,6 @@ def generate_tokens(readline):
     contline = None
     indents = [0]
 
-    # 'stashed' and 'async_*' are used for async/await parsing
-    stashed = None
-    async_def = False
-    async_def_indent = 0
-    async_def_nl = False
-
     while 1:                                   # loop over lines in stream
         try:
             line = readline()
@@ -426,10 +420,6 @@ def generate_tokens(readline):
                 pos = pos + 1
             if pos == max: break
 
-            if stashed:
-                yield stashed
-                stashed = None
-
             if line[pos] in '#\r\n':           # skip comments or blank lines
                 if line[pos] == '#':
                     comment_token = line[pos:].rstrip('\r\n')
@@ -453,18 +443,8 @@ def generate_tokens(readline):
                         ("<tokenize>", lnum, pos, line))
                 indents = indents[:-1]
 
-                if async_def and async_def_indent >= indents[-1]:
-                    async_def = False
-                    async_def_nl = False
-                    async_def_indent = 0
-
                 yield (DEDENT, '', (lnum, pos), (lnum, pos), line)
 
-            if async_def and async_def_nl and async_def_indent >= indents[-1]:
-                async_def = False
-                async_def_nl = False
-                async_def_indent = 0
-
         else:                                  # continued statement
             if not line:
                 raise TokenError("EOF in multi-line statement", (lnum, 0))
@@ -484,18 +464,10 @@ def generate_tokens(readline):
                     newline = NEWLINE
                     if parenlev > 0:
                         newline = NL
-                    elif async_def:
-                        async_def_nl = True
-                    if stashed:
-                        yield stashed
-                        stashed = None
                     yield (newline, token, spos, epos, line)
 
                 elif initial == '#':
                     assert not token.endswith("\n")
-                    if stashed:
-                        yield stashed
-                        stashed = None
                     yield (COMMENT, token, spos, epos, line)
                 elif token in triple_quoted:
                     endprog = endprogs[token]
@@ -503,9 +475,6 @@ def generate_tokens(readline):
                     if endmatch:                           # all on one line
                         pos = endmatch.end(0)
                         token = line[start:pos]
-                        if stashed:
-                            yield stashed
-                            stashed = None
                         yield (STRING, token, spos, (lnum, pos), line)
                     else:
                         strstart = (lnum, start)           # multiple lines
@@ -523,63 +492,22 @@ def generate_tokens(readline):
                         contline = line
                         break
                     else:                                  # ordinary string
-                        if stashed:
-                            yield stashed
-                            stashed = None
                         yield (STRING, token, spos, epos, line)
                 elif initial in namechars:                 # ordinary name
-                    if token in ('async', 'await'):
-                        if async_def:
-                            yield (ASYNC if token == 'async' else AWAIT,
-                                   token, spos, epos, line)
-                            continue
-
-                    tok = (NAME, token, spos, epos, line)
-                    if token == 'async' and not stashed:
-                        stashed = tok
-                        continue
-
-                    if token == 'def':
-                        if (stashed
-                                and stashed[0] == NAME
-                                and stashed[1] == 'async'):
-
-                            async_def = True
-                            async_def_indent = indents[-1]
-
-                            yield (ASYNC, stashed[1],
-                                   stashed[2], stashed[3],
-                                   stashed[4])
-                            stashed = None
-
-                    if stashed:
-                        yield stashed
-                        stashed = None
-
-                    yield tok
+                    yield (NAME, token, spos, epos, line)
                 elif initial == '\\':                      # continued stmt
                     # This yield is new; needed for better idempotency:
-                    if stashed:
-                        yield stashed
-                        stashed = None
                     yield (NL, token, spos, (lnum, pos), line)
                     continued = 1
                 else:
                     if initial in '([{': parenlev = parenlev + 1
                     elif initial in ')]}': parenlev = parenlev - 1
-                    if stashed:
-                        yield stashed
-                        stashed = None
                     yield (OP, token, spos, epos, line)
             else:
                 yield (ERRORTOKEN, line[pos],
                            (lnum, pos), (lnum, pos+1), line)
                 pos = pos + 1
 
-    if stashed:
-        yield stashed
-        stashed = None
-
     for indent in indents[1:]:                 # pop remaining indent levels
         yield (DEDENT, '', (lnum, 0), (lnum, 0), '')
     yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')
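
All of the deleted code implemented a one-token lookahead: the
tokenizer stashed a NAME token for 'async', waited to see whether 'def'
followed, and only then decided between ASYNC and NAME. With proper
keywords, the parser's keyword table does that work and the tokenizer
can emit NAME unconditionally. A minimal check, assuming 3.7:

    import io
    from lib2to3.pgen2 import token, tokenize

    toks = tokenize.generate_tokens(
        io.StringIO("async def f(): pass\n").readline)
    toknum, tokval = next(toks)[:2]
    assert toknum == token.NAME and tokval == "async"
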
diff --git a/Lib/lib2to3/tests/test_parser.py b/Lib/lib2to3/tests/test_parser.py
index 2efcb80c2f9..dc94a69036a 100644
--- a/Lib/lib2to3/tests/test_parser.py
+++ b/Lib/lib2to3/tests/test_parser.py
@@ -167,34 +167,34 @@ def foo(): pass
             async def foo(): await x
         """)
 
-        self.invalid_syntax("await x")
-        self.invalid_syntax("""def foo():
-                                   await x""")
+        self.validate("await x")
+        self.validate("""def foo():
+                        await x""")
 
-        self.invalid_syntax("""def foo():
+        self.validate("""def foo():
             def foo(): pass
             async def foo(): pass
             await x
         """)
 
     def test_async_var(self):
-        self.validate("""async = 1""")
-        self.validate("""await = 1""")
-        self.validate("""def async(): pass""")
+        self.invalid_syntax("""async = 1""")
+        self.invalid_syntax("""await = 1""")
+        self.invalid_syntax("""def async(): pass""")
 
     def test_async_with(self):
         self.validate("""async def foo():
                              async for a in b: pass""")
 
-        self.invalid_syntax("""def foo():
-                                   async for a in b: pass""")
+        self.validate("""def foo():
+                             async for a in b: pass""")
 
     def test_async_for(self):
         self.validate("""async def foo():
                              async with a: pass""")
 
-        self.invalid_syntax("""def foo():
-                                   async with a: pass""")
+        self.validate("""def foo():
+                             async with a: pass""")
 
 
 class TestRaiseChanges(GrammarTest):
@@ -477,3 +477,7 @@ def diff(fn, result):
             os.remove("@")
         except OSError:
             pass
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/Lib/pydoc.py b/Lib/pydoc.py
index 8dc3c0ace3c..01f7a32f454 100644
--- a/Lib/pydoc.py
+++ b/Lib/pydoc.py
@@ -1703,7 +1703,7 @@ class Helper:
     # in pydoc_data/topics.py.
     #
     # CAUTION: if you change one of these dictionaries, be sure to adapt the
-    #          list of needed labels in Doc/tools/pyspecific.py and
+    #          list of needed labels in Doc/tools/extensions/pyspecific.py and
     #          regenerate the pydoc_data/topics.py file by running
     #              make pydoc-topics
     #          in Doc/ and copying the output file into the Lib/ directory.
@@ -1715,6 +1715,8 @@ class Helper:
         'and': 'BOOLEAN',
         'as': 'with',
         'assert': ('assert', ''),
+        'async': ('async', ''),
+        'await': ('await', ''),
         'break': ('break', 'while for'),
         'class': ('class', 'CLASSES SPECIALMETHODS'),
         'continue': ('continue', 'while for'),
diff --git a/Lib/symbol.py b/Lib/symbol.py
index d9f01e081a7..dc7dcba5e4d 100755
--- a/Lib/symbol.py
+++ b/Lib/symbol.py
@@ -91,11 +91,12 @@
 arglist = 334
 argument = 335
 comp_iter = 336
-comp_for = 337
-comp_if = 338
-encoding_decl = 339
-yield_expr = 340
-yield_arg = 341
+sync_comp_for = 337
+comp_for = 338
+comp_if = 339
+encoding_decl = 340
+yield_expr = 341
+yield_arg = 342
 #--end constants--
 
 sym_name = {}
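
Lib/symbol.py mirrors Include/graminit.h, so the same renumbering shows
up here; on 3.7 the shift is observable directly:

    import symbol

    assert symbol.sym_name[337] == "sync_comp_for"
    assert symbol.comp_for == 338  # was 337 before this commit
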
diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py
index 7ff56b560b6..e23963a6221 100644
--- a/Lib/test/test_asyncio/test_tasks.py
+++ b/Lib/test/test_asyncio/test_tasks.py
@@ -231,12 +231,6 @@ def test_ensure_future_neither(self):
         with self.assertRaises(TypeError):
             asyncio.ensure_future('ok')
 
-    def test_async_warning(self):
-        f = self.new_future(self.loop)
-        with self.assertWarnsRegex(DeprecationWarning,
-                                   'function is deprecated, use ensure_'):
-            self.assertIs(f, asyncio.async(f))
-
     def test_get_stack(self):
         T = None
 
diff --git a/Lib/test/test_coroutines.py b/Lib/test/test_coroutines.py
index 2b79a17ea70..ebd880bab0c 100644
--- a/Lib/test/test_coroutines.py
+++ b/Lib/test/test_coroutines.py
@@ -394,20 +394,14 @@ def async(): pass
         ]
 
         for code in samples:
-            with self.subTest(code=code), self.assertWarnsRegex(
-                    DeprecationWarning,
-                    "'await' will become reserved keywords"):
+            with self.subTest(code=code), self.assertRaises(SyntaxError):
                 compile(code, "<test>", "exec")
 
     def test_badsyntax_3(self):
-        with self.assertRaises(DeprecationWarning):
-            with warnings.catch_warnings():
-                warnings.simplefilter("error")
-                compile("async = 1", "<test>", "exec")
-
-    def test_goodsyntax_1(self):
-        # Tests for issue 24619
+        with self.assertRaises(SyntaxError):
+            compile("async = 1", "<test>", "exec")
 
+    def test_badsyntax_4(self):
         samples = [
             '''def foo(await):
                 async def foo(): pass
@@ -454,14 +448,8 @@ def foo(): pass
         ]
 
         for code in samples:
-            with self.subTest(code=code):
-                loc = {}
-
-                with warnings.catch_warnings():
-                    warnings.simplefilter("ignore")
-                    exec(code, loc, loc)
-
-                self.assertEqual(loc['foo'](10), 11)
+            with self.subTest(code=code), self.assertRaises(SyntaxError):
+                compile(code, "<test>", "exec")
 
 
 class TokenizerRegrTest(unittest.TestCase):
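
The flipped assertions are the core of the change: constructs that
previously compiled with a DeprecationWarning (or even silently, for
the old "goodsyntax" samples) must now raise SyntaxError. For example,
on 3.7:

    for src in ("await = 1", "def async(): pass"):
        try:
            compile(src, "<test>", "exec")
        except SyntaxError:
            print(src, "-> SyntaxError")
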
diff --git a/Lib/test/test_parser.py b/Lib/test/test_parser.py
index 70cabb28598..647d391c798 100644
--- a/Lib/test/test_parser.py
+++ b/Lib/test/test_parser.py
@@ -679,16 +679,16 @@ def test_missing_import_source(self):
     def test_illegal_encoding(self):
         # Illegal encoding declaration
         tree = \
-            (339,
+            (340,
              (257, (0, '')))
         self.check_bad_tree(tree, "missed encoding")
         tree = \
-            (339,
+            (340,
              (257, (0, '')),
               b'iso-8859-1')
         self.check_bad_tree(tree, "non-string encoding")
         tree = \
-            (339,
+            (340,
              (257, (0, '')),
               '\udcff')
         with self.assertRaises(UnicodeEncodeError):
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index 21eee6de2d1..3520a67bd42 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -759,7 +759,7 @@ def test_async(self):
     """)
 
         self.check_tokenize("async def foo(): pass", """\
-    ASYNC      'async'       (1, 0) (1, 5)
+    NAME       'async'       (1, 0) (1, 5)
     NAME       'def'         (1, 6) (1, 9)
     NAME       'foo'         (1, 10) (1, 13)
     OP         '('           (1, 13) (1, 14)
@@ -776,7 +776,7 @@ def foo(await):
     await
 async += 1
 ''', """\
-    ASYNC      'async'       (1, 0) (1, 5)
+    NAME       'async'       (1, 0) (1, 5)
     NAME       'def'         (1, 6) (1, 9)
     NAME       'foo'         (1, 10) (1, 13)
     OP         '('           (1, 13) (1, 14)
@@ -787,12 +787,12 @@ def foo(await):
     NAME       'def'         (2, 2) (2, 5)
     NAME       'foo'         (2, 6) (2, 9)
     OP         '('           (2, 9) (2, 10)
-    AWAIT      'await'       (2, 10) (2, 15)
+    NAME       'await'       (2, 10) (2, 15)
     OP         ')'           (2, 15) (2, 16)
     OP         ':'           (2, 16) (2, 17)
     NEWLINE    '\\n'          (2, 17) (2, 18)
     INDENT     '    '        (3, 0) (3, 4)
-    AWAIT      'await'       (3, 4) (3, 9)
+    NAME       'await'       (3, 4) (3, 9)
     OP         '='           (3, 10) (3, 11)
     NUMBER     '1'           (3, 12) (3, 13)
     NEWLINE    '\\n'          (3, 13) (3, 14)
@@ -802,7 +802,7 @@ def foo(await):
     OP         ':'           (4, 6) (4, 7)
     NEWLINE    '\\n'          (4, 7) (4, 8)
     INDENT     '    '        (5, 0) (5, 4)
-    AWAIT      'await'       (5, 4) (5, 9)
+    NAME       'await'       (5, 4) (5, 9)
     NEWLINE    '\\n'          (5, 9) (5, 10)
     DEDENT     ''            (6, 0) (6, 0)
     DEDENT     ''            (6, 0) (6, 0)
@@ -815,7 +815,7 @@ def foo(await):
         self.check_tokenize('''\
 async def foo():
   async for i in 1: pass''', """\
-    ASYNC      'async'       (1, 0) (1, 5)
+    NAME       'async'       (1, 0) (1, 5)
     NAME       'def'         (1, 6) (1, 9)
     NAME       'foo'         (1, 10) (1, 13)
     OP         '('           (1, 13) (1, 14)
@@ -823,7 +823,7 @@ def foo(await):
     OP         ':'           (1, 15) (1, 16)
     NEWLINE    '\\n'          (1, 16) (1, 17)
     INDENT     '  '          (2, 0) (2, 2)
-    ASYNC      'async'       (2, 2) (2, 7)
+    NAME       'async'       (2, 2) (2, 7)
     NAME       'for'         (2, 8) (2, 11)
     NAME       'i'           (2, 12) (2, 13)
     NAME       'in'          (2, 14) (2, 16)
@@ -834,14 +834,14 @@ def foo(await):
     """)
 
         self.check_tokenize('''async def foo(async): await''', """\
-    ASYNC      'async'       (1, 0) (1, 5)
+    NAME       'async'       (1, 0) (1, 5)
     NAME       'def'         (1, 6) (1, 9)
     NAME       'foo'         (1, 10) (1, 13)
     OP         '('           (1, 13) (1, 14)
-    ASYNC      'async'       (1, 14) (1, 19)
+    NAME       'async'       (1, 14) (1, 19)
     OP         ')'           (1, 19) (1, 20)
     OP         ':'           (1, 20) (1, 21)
-    AWAIT      'await'       (1, 22) (1, 27)
+    NAME       'await'       (1, 22) (1, 27)
     """)
 
         self.check_tokenize('''\
@@ -866,7 +866,7 @@ def baz(): pass
     OP         ':'           (3, 11) (3, 12)
     NAME       'pass'        (3, 13) (3, 17)
     NEWLINE    '\\n'          (3, 17) (3, 18)
-    ASYNC      'async'       (4, 2) (4, 7)
+    NAME       'async'       (4, 2) (4, 7)
     NAME       'def'         (4, 8) (4, 11)
     NAME       'bar'         (4, 12) (4, 15)
     OP         '('           (4, 15) (4, 16)
@@ -888,7 +888,7 @@ def baz(): pass
   async def bar(): pass
 
   await = 2''', """\
-    ASYNC      'async'       (1, 0) (1, 5)
+    NAME       'async'       (1, 0) (1, 5)
     NAME       'def'         (1, 6) (1, 9)
     NAME       'f'           (1, 10) (1, 11)
     OP         '('           (1, 11) (1, 12)
@@ -904,7 +904,7 @@ def baz(): pass
     OP         ':'           (3, 11) (3, 12)
     NAME       'pass'        (3, 13) (3, 17)
     NEWLINE    '\\n'          (3, 17) (3, 18)
-    ASYNC      'async'       (4, 2) (4, 7)
+    NAME       'async'       (4, 2) (4, 7)
     NAME       'def'         (4, 8) (4, 11)
     NAME       'bar'         (4, 12) (4, 15)
     OP         '('           (4, 15) (4, 16)
@@ -913,7 +913,7 @@ def baz(): pass
     NAME       'pass'        (4, 19) (4, 23)
     NEWLINE    '\\n'          (4, 23) (4, 24)
     NL         '\\n'          (5, 0) (5, 1)
-    AWAIT      'await'       (6, 2) (6, 7)
+    NAME       'await'       (6, 2) (6, 7)
     OP         '='           (6, 8) (6, 9)
     NUMBER     '2'           (6, 10) (6, 11)
     DEDENT     ''            (7, 0) (7, 0)
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index 5fa41526093..f5c6ac7f5e0 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -491,12 +491,6 @@ def _tokenize(readline, encoding):
     contline = None
     indents = [0]
 
-    # 'stashed' and 'async_*' are used for async/await parsing
-    stashed = None
-    async_def = False
-    async_def_indent = 0
-    async_def_nl = False
-
     if encoding is not None:
         if encoding == "utf-8-sig":
             # BOM will already have been stripped.
@@ -571,18 +565,8 @@ def _tokenize(readline, encoding):
                         ("<tokenize>", lnum, pos, line))
                 indents = indents[:-1]
 
-                if async_def and async_def_indent >= indents[-1]:
-                    async_def = False
-                    async_def_nl = False
-                    async_def_indent = 0
-
                 yield TokenInfo(DEDENT, '', (lnum, pos), (lnum, pos), line)
 
-            if async_def and async_def_nl and async_def_indent >= indents[-1]:
-                async_def = False
-                async_def_nl = False
-                async_def_indent = 0
-
         else:                                  # continued statement
             if not line:
                 raise TokenError("EOF in multi-line statement", (lnum, 0))
@@ -601,21 +585,13 @@ def _tokenize(readline, encoding):
                     (initial == '.' and token != '.' and token != '...')):
                     yield TokenInfo(NUMBER, token, spos, epos, line)
                 elif initial in '\r\n':
-                    if stashed:
-                        yield stashed
-                        stashed = None
                     if parenlev > 0:
                         yield TokenInfo(NL, token, spos, epos, line)
                     else:
                         yield TokenInfo(NEWLINE, token, spos, epos, line)
-                        if async_def:
-                            async_def_nl = True
 
                 elif initial == '#':
                     assert not token.endswith("\n")
-                    if stashed:
-                        yield stashed
-                        stashed = None
                     yield TokenInfo(COMMENT, token, spos, epos, line)
 
                 elif token in triple_quoted:
@@ -662,36 +638,7 @@ def _tokenize(readline, encoding):
                         yield TokenInfo(STRING, token, spos, epos, line)
 
                 elif initial.isidentifier():               # ordinary name
-                    if token in ('async', 'await'):
-                        if async_def:
-                            yield TokenInfo(
-                                ASYNC if token == 'async' else AWAIT,
-                                token, spos, epos, line)
-                            continue
-
-                    tok = TokenInfo(NAME, token, spos, epos, line)
-                    if token == 'async' and not stashed:
-                        stashed = tok
-                        continue
-
-                    if token == 'def':
-                        if (stashed
-                                and stashed.type == NAME
-                                and stashed.string == 'async'):
-
-                            async_def = True
-                            async_def_indent = indents[-1]
-
-                            yield TokenInfo(ASYNC, stashed.string,
-                                            stashed.start, stashed.end,
-                                            stashed.line)
-                            stashed = None
-
-                    if stashed:
-                        yield stashed
-                        stashed = None
-
-                    yield tok
+                    yield TokenInfo(NAME, token, spos, epos, line)
                 elif initial == '\\':                      # continued stmt
                     continued = 1
                 else:
@@ -699,19 +646,12 @@ def _tokenize(readline, encoding):
                         parenlev += 1
                     elif initial in ')]}':
                         parenlev -= 1
-                    if stashed:
-                        yield stashed
-                        stashed = None
                     yield TokenInfo(OP, token, spos, epos, line)
             else:
                 yield TokenInfo(ERRORTOKEN, line[pos],
                            (lnum, pos), (lnum, pos+1), line)
                 pos += 1
 
-    if stashed:
-        yield stashed
-        stashed = None
-
     for indent in indents[1:]:                 # pop remaining indent levels
         yield TokenInfo(DEDENT, '', (lnum, 0), (lnum, 0), '')
     yield TokenInfo(ENDMARKER, '', (lnum, 0), (lnum, 0), '')
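
This mirrors the lib2to3 change above: the stash-and-lookahead
machinery disappears and NAME is emitted unconditionally. Round-trips
through untokenize() are unaffected; a sketch, assuming 3.7:

    import io, tokenize

    src = "async def f():\n    await g()\n"
    toks = list(tokenize.generate_tokens(io.StringIO(src).readline))
    assert tokenize.untokenize(toks) == src
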
diff --git a/Misc/NEWS.d/next/Core and Builtins/2017-07-20-22-03-44.bpo-30406._kr47t.rst b/Misc/NEWS.d/next/Core and Builtins/2017-07-20-22-03-44.bpo-30406._kr47t.rst
new file mode 100644
index 00000000000..caf56f03783
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2017-07-20-22-03-44.bpo-30406._kr47t.rst	
@@ -0,0 +1 @@
+Make ``async`` and ``await`` proper keywords, as specified in PEP 492.
diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c
index 51f98e9b2e9..28254e10331 100644
--- a/Parser/tokenizer.c
+++ b/Parser/tokenizer.c
@@ -103,8 +103,6 @@ const char *_PyParser_TokenNames[] = {
     "ELLIPSIS",
     /* This table must match the #defines in token.h! */
     "OP",
-    "AWAIT",
-    "ASYNC",
     "<ERRORTOKEN>",
     "COMMENT",
     "NL",
@@ -151,10 +149,6 @@ tok_new(void)
     tok->decoding_buffer = NULL;
 #endif
 
-    tok->async_def = 0;
-    tok->async_def_indent = 0;
-    tok->async_def_nl = 0;
-
     return tok;
 }
 
@@ -1471,21 +1465,6 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
         }
     }
 
-    if (tok->async_def
-        && !blankline
-        && tok->level == 0
-        /* There was a NEWLINE after ASYNC DEF,
-           so we're past the signature. */
-        && tok->async_def_nl
-        /* Current indentation level is less than where
-           the async function was defined */
-        && tok->async_def_indent >= tok->indent)
-    {
-        tok->async_def = 0;
-        tok->async_def_indent = 0;
-        tok->async_def_nl = 0;
-    }
-
  again:
     tok->start = NULL;
     /* Skip spaces */
@@ -1550,43 +1529,6 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
         *p_start = tok->start;
         *p_end = tok->cur;
 
-        /* async/await parsing block. */
-        if (tok->cur - tok->start == 5) {
-            /* Current token length is 5. */
-            if (tok->async_def) {
-                /* We're inside an 'async def' function. */
-                if (memcmp(tok->start, "async", 5) == 0) {
-                    return ASYNC;
-                }
-                if (memcmp(tok->start, "await", 5) == 0) {
-                    return AWAIT;
-                }
-            }
-            else if (memcmp(tok->start, "async", 5) == 0) {
-                /* The current token is 'async'.
-                   Look ahead one token.*/
-
-                struct tok_state ahead_tok;
-                char *ahead_tok_start = NULL, *ahead_tok_end = NULL;
-                int ahead_tok_kind;
-
-                memcpy(&ahead_tok, tok, sizeof(ahead_tok));
-                ahead_tok_kind = tok_get(&ahead_tok, &ahead_tok_start,
-                                         &ahead_tok_end);
-
-                if (ahead_tok_kind == NAME
-                    && ahead_tok.cur - ahead_tok.start == 3
-                    && memcmp(ahead_tok.start, "def", 3) == 0)
-                {
-                    /* The next token is going to be 'def', so instead of
-                       returning 'async' NAME token, we return ASYNC. */
-                    tok->async_def_indent = tok->indent;
-                    tok->async_def = 1;
-                    return ASYNC;
-                }
-            }
-        }
-
         return NAME;
     }
 
@@ -1599,11 +1541,6 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
         *p_start = tok->start;
         *p_end = tok->cur - 1; /* Leave '\n' out of the string */
         tok->cont_line = 0;
-        if (tok->async_def) {
-            /* We're somewhere inside an 'async def' function, and
-               we've encountered a NEWLINE after its signature. */
-            tok->async_def_nl = 1;
-        }
         return NEWLINE;
     }
 
diff --git a/Parser/tokenizer.h b/Parser/tokenizer.h
index 0ad3551c894..ad8b1c80171 100644
--- a/Parser/tokenizer.h
+++ b/Parser/tokenizer.h
@@ -65,13 +65,6 @@ struct tok_state {
     const char* enc;        /* Encoding for the current str. */
     const char* str;
     const char* input; /* Tokenizer's newline translated copy of the string. */
-
-    /* async/await related fields; can be removed in 3.7 when async and await
-       become normal keywords. */
-    int async_def;        /* =1 if tokens are inside an 'async def' body. */
-    int async_def_indent; /* Indentation level of the outermost 'async def'. */
-    int async_def_nl;     /* =1 if the outermost 'async def' had at least one
-                             NEWLINE token after it. */
 };
 
 extern struct tok_state *PyTokenizer_FromString(const char *, int);
diff --git a/Python/ast.c b/Python/ast.c
index 33356da0756..6989965efab 100644
--- a/Python/ast.c
+++ b/Python/ast.c
@@ -949,28 +949,6 @@ forbidden_name(struct compiling *c, identifier name, const node *n,
         ast_error(c, n, "assignment to keyword");
         return 1;
     }
-    if (_PyUnicode_EqualToASCIIString(name, "async") ||
-        _PyUnicode_EqualToASCIIString(name, "await"))
-    {
-        PyObject *message = PyUnicode_FromString(
-            "'async' and 'await' will become reserved keywords"
-            " in Python 3.7");
-        int ret;
-        if (message == NULL) {
-            return 1;
-        }
-        ret = PyErr_WarnExplicitObject(
-                PyExc_DeprecationWarning,
-                message,
-                c->c_filename,
-                LINENO(n),
-                NULL,
-                NULL);
-        Py_DECREF(message);
-        if (ret < 0) {
-            return 1;
-        }
-    }
     if (full_checks) {
         const char * const *p;
         for (p = FORBIDDEN; *p; p++) {
@@ -1642,9 +1620,10 @@ ast_for_funcdef_impl(struct compiling *c, const node *n,
 static stmt_ty
 ast_for_async_funcdef(struct compiling *c, const node *n, asdl_seq *decorator_seq)
 {
-    /* async_funcdef: ASYNC funcdef */
+    /* async_funcdef: 'async' funcdef */
     REQ(n, async_funcdef);
-    REQ(CHILD(n, 0), ASYNC);
+    REQ(CHILD(n, 0), NAME);
+    assert(strcmp(STR(CHILD(n, 0)), "async") == 0);
     REQ(CHILD(n, 1), funcdef);
 
     return ast_for_funcdef_impl(c, CHILD(n, 1), decorator_seq,
@@ -1663,9 +1642,10 @@ ast_for_funcdef(struct compiling *c, const node *n, asdl_seq *decorator_seq)
 static stmt_ty
 ast_for_async_stmt(struct compiling *c, const node *n)
 {
-    /* async_stmt: ASYNC (funcdef | with_stmt | for_stmt) */
+    /* async_stmt: 'async' (funcdef | with_stmt | for_stmt) */
     REQ(n, async_stmt);
-    REQ(CHILD(n, 0), ASYNC);
+    REQ(CHILD(n, 0), NAME);
+    assert(strcmp(STR(CHILD(n, 0)), "async") == 0);
 
     switch (TYPE(CHILD(n, 1))) {
         case funcdef:
@@ -1778,17 +1758,23 @@ static int
 count_comp_fors(struct compiling *c, const node *n)
 {
     int n_fors = 0;
-    int is_async;
 
   count_comp_for:
-    is_async = 0;
     n_fors++;
     REQ(n, comp_for);
-    if (TYPE(CHILD(n, 0)) == ASYNC) {
-        is_async = 1;
+    if (NCH(n) == 2) {
+        REQ(CHILD(n, 0), NAME);
+        assert(strcmp(STR(CHILD(n, 0)), "async") == 0);
+        n = CHILD(n, 1);
     }
-    if (NCH(n) == (5 + is_async)) {
-        n = CHILD(n, 4 + is_async);
+    else if (NCH(n) == 1) {
+        n = CHILD(n, 0);
+    }
+    else {
+        goto error;
+    }
+    if (NCH(n) == 5) {
+        n = CHILD(n, 4);
     }
     else {
         return n_fors;
@@ -1807,6 +1793,7 @@ count_comp_fors(struct compiling *c, const node *n)
             return n_fors;
     }
 
+  error:
     /* Should never be reached */
     PyErr_SetString(PyExc_SystemError,
                     "logic error in count_comp_fors");
@@ -1855,19 +1842,27 @@ ast_for_comprehension(struct compiling *c, const node *n)
         asdl_seq *t;
         expr_ty expression, first;
         node *for_ch;
+        node *sync_n;
         int is_async = 0;
 
         REQ(n, comp_for);
 
-        if (TYPE(CHILD(n, 0)) == ASYNC) {
+        if (NCH(n) == 2) {
             is_async = 1;
+            REQ(CHILD(n, 0), NAME);
+            assert(strcmp(STR(CHILD(n, 0)), "async") == 0);
+            sync_n = CHILD(n, 1);
+        }
+        else {
+            sync_n = CHILD(n, 0);
         }
+        REQ(sync_n, sync_comp_for);
 
-        for_ch = CHILD(n, 1 + is_async);
+        for_ch = CHILD(sync_n, 1);
         t = ast_for_exprlist(c, for_ch, Store);
         if (!t)
             return NULL;
-        expression = ast_for_expr(c, CHILD(n, 3 + is_async));
+        expression = ast_for_expr(c, CHILD(sync_n, 3));
         if (!expression)
             return NULL;
 
@@ -1884,11 +1879,11 @@ ast_for_comprehension(struct compiling *c, const node *n)
         if (!comp)
             return NULL;
 
-        if (NCH(n) == (5 + is_async)) {
+        if (NCH(sync_n) == 5) {
             int j, n_ifs;
             asdl_seq *ifs;
 
-            n = CHILD(n, 4 + is_async);
+            n = CHILD(sync_n, 4);
             n_ifs = count_comp_ifs(c, n);
             if (n_ifs == -1)
                 return NULL;
@@ -2470,7 +2465,7 @@ ast_for_atom_expr(struct compiling *c, const node *n)
     REQ(n, atom_expr);
     nch = NCH(n);
 
-    if (TYPE(CHILD(n, 0)) == AWAIT) {
+    if (TYPE(CHILD(n, 0)) == NAME && strcmp(STR(CHILD(n, 0)), "await") == 0) {
         start = 1;
         assert(nch > 1);
     }
@@ -2497,7 +2492,7 @@ ast_for_atom_expr(struct compiling *c, const node *n)
     }
 
     if (start) {
-        /* there was an AWAIT */
+        /* there was an 'await' */
         return Await(e, LINENO(n), n->n_col_offset, c->c_arena);
     }
     else {
@@ -2562,7 +2557,7 @@ ast_for_expr(struct compiling *c, const node *n)
        term: factor (('*'|'@'|'/'|'%'|'//') factor)*
        factor: ('+'|'-'|'~') factor | power
        power: atom_expr ['**' factor]
-       atom_expr: [AWAIT] atom trailer*
+       atom_expr: ['await'] atom trailer*
        yield_expr: 'yield' [yield_arg]
     */
 
diff --git a/Python/graminit.c b/Python/graminit.c
index f2584e0a2ad..8e89ccea3ba 100644
--- a/Python/graminit.c
+++ b/Python/graminit.c
@@ -1812,272 +1812,284 @@ static state states_80[2] = {
     {2, arcs_80_0},
     {1, arcs_80_1},
 };
-static arc arcs_81_0[2] = {
-    {21, 1},
-    {101, 2},
+static arc arcs_81_0[1] = {
+    {101, 1},
 };
 static arc arcs_81_1[1] = {
-    {101, 2},
+    {66, 2},
 };
 static arc arcs_81_2[1] = {
-    {66, 3},
+    {102, 3},
 };
 static arc arcs_81_3[1] = {
-    {102, 4},
+    {112, 4},
 };
-static arc arcs_81_4[1] = {
-    {112, 5},
+static arc arcs_81_4[2] = {
+    {171, 5},
+    {0, 4},
 };
-static arc arcs_81_5[2] = {
-    {171, 6},
+static arc arcs_81_5[1] = {
     {0, 5},
 };
-static arc arcs_81_6[1] = {
-    {0, 6},
-};
-static state states_81[7] = {
-    {2, arcs_81_0},
+static state states_81[6] = {
+    {1, arcs_81_0},
     {1, arcs_81_1},
     {1, arcs_81_2},
     {1, arcs_81_3},
-    {1, arcs_81_4},
-    {2, arcs_81_5},
-    {1, arcs_81_6},
+    {2, arcs_81_4},
+    {1, arcs_81_5},
 };
-static arc arcs_82_0[1] = {
-    {97, 1},
+static arc arcs_82_0[2] = {
+    {21, 1},
+    {173, 2},
 };
 static arc arcs_82_1[1] = {
-    {114, 2},
+    {173, 2},
 };
-static arc arcs_82_2[2] = {
-    {171, 3},
+static arc arcs_82_2[1] = {
     {0, 2},
 };
-static arc arcs_82_3[1] = {
-    {0, 3},
-};
-static state states_82[4] = {
-    {1, arcs_82_0},
+static state states_82[3] = {
+    {2, arcs_82_0},
     {1, arcs_82_1},
-    {2, arcs_82_2},
-    {1, arcs_82_3},
+    {1, arcs_82_2},
 };
 static arc arcs_83_0[1] = {
-    {23, 1},
+    {97, 1},
 };
 static arc arcs_83_1[1] = {
-    {0, 1},
+    {114, 2},
+};
+static arc arcs_83_2[2] = {
+    {171, 3},
+    {0, 2},
+};
+static arc arcs_83_3[1] = {
+    {0, 3},
 };
-static state states_83[2] = {
+static state states_83[4] = {
     {1, arcs_83_0},
     {1, arcs_83_1},
+    {2, arcs_83_2},
+    {1, arcs_83_3},
 };
 static arc arcs_84_0[1] = {
-    {174, 1},
+    {23, 1},
 };
-static arc arcs_84_1[2] = {
-    {175, 2},
+static arc arcs_84_1[1] = {
     {0, 1},
 };
-static arc arcs_84_2[1] = {
+static state states_84[2] = {
+    {1, arcs_84_0},
+    {1, arcs_84_1},
+};
+static arc arcs_85_0[1] = {
+    {175, 1},
+};
+static arc arcs_85_1[2] = {
+    {176, 2},
+    {0, 1},
+};
+static arc arcs_85_2[1] = {
     {0, 2},
 };
-static state states_84[3] = {
-    {1, arcs_84_0},
-    {2, arcs_84_1},
-    {1, arcs_84_2},
+static state states_85[3] = {
+    {1, arcs_85_0},
+    {2, arcs_85_1},
+    {1, arcs_85_2},
 };
-static arc arcs_85_0[2] = {
+static arc arcs_86_0[2] = {
     {77, 1},
     {9, 2},
 };
-static arc arcs_85_1[1] = {
+static arc arcs_86_1[1] = {
     {26, 2},
 };
-static arc arcs_85_2[1] = {
+static arc arcs_86_2[1] = {
     {0, 2},
 };
-static state states_85[3] = {
-    {2, arcs_85_0},
-    {1, arcs_85_1},
-    {1, arcs_85_2},
+static state states_86[3] = {
+    {2, arcs_86_0},
+    {1, arcs_86_1},
+    {1, arcs_86_2},
 };
-static dfa dfas[86] = {
+static dfa dfas[87] = {
     {256, "single_input", 0, 3, states_0,
-     "\004\050\340\000\002\000\000\000\012\076\011\007\262\004\020\002\000\300\220\050\037\102"},
+     "\004\050\340\000\002\000\000\000\012\076\011\007\262\004\020\002\000\300\220\050\037\202\000"},
     {257, "file_input", 0, 2, states_1,
-     "\204\050\340\000\002\000\000\000\012\076\011\007\262\004\020\002\000\300\220\050\037\102"},
+     "\204\050\340\000\002\000\000\000\012\076\011\007\262\004\020\002\000\300\220\050\037\202\000"},
     {258, "eval_input", 0, 3, states_2,
-     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
+     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
     {259, "decorator", 0, 7, states_3,
-     "\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {260, "decorators", 0, 2, states_4,
-     "\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {261, "decorated", 0, 3, states_5,
-     "\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {262, "async_funcdef", 0, 3, states_6,
-     "\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {263, "funcdef", 0, 8, states_7,
-     "\000\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {264, "parameters", 0, 4, states_8,
-     "\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {265, "typedargslist", 0, 19, states_9,
-     "\000\000\200\000\006\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\200\000\006\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {266, "tfpdef", 0, 4, states_10,
-     "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {267, "varargslist", 0, 19, states_11,
-     "\000\000\200\000\006\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\200\000\006\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {268, "vfpdef", 0, 2, states_12,
-     "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {269, "stmt", 0, 2, states_13,
-     "\000\050\340\000\002\000\000\000\012\076\011\007\262\004\020\002\000\300\220\050\037\102"},
+     "\000\050\340\000\002\000\000\000\012\076\011\007\262\004\020\002\000\300\220\050\037\202\000"},
     {270, "simple_stmt", 0, 4, states_14,
-     "\000\040\200\000\002\000\000\000\012\076\011\007\000\000\020\002\000\300\220\050\037\100"},
+     "\000\040\200\000\002\000\000\000\012\076\011\007\000\000\020\002\000\300\220\050\037\200\000"},
     {271, "small_stmt", 0, 2, states_15,
-     "\000\040\200\000\002\000\000\000\012\076\011\007\000\000\020\002\000\300\220\050\037\100"},
+     "\000\040\200\000\002\000\000\000\012\076\011\007\000\000\020\002\000\300\220\050\037\200\000"},
     {272, "expr_stmt", 0, 6, states_16,
-     "\000\040\200\000\002\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
+     "\000\040\200\000\002\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
     {273, "annassign", 0, 5, states_17,
-     "\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {274, "testlist_star_expr", 0, 3, states_18,
-     "\000\040\200\000\002\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
+     "\000\040\200\000\002\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
     {275, "augassign", 0, 2, states_19,
-     "\000\000\000\000\000\000\360\377\001\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\360\377\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {276, "del_stmt", 0, 3, states_20,
-     "\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {277, "pass_stmt", 0, 2, states_21,
-     "\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {278, "flow_stmt", 0, 2, states_22,
-     "\000\000\000\000\000\000\000\000\000\036\000\000\000\000\000\000\000\000\000\000\000\100"},
+     "\000\000\000\000\000\000\000\000\000\036\000\000\000\000\000\000\000\000\000\000\000\200\000"},
     {279, "break_stmt", 0, 2, states_23,
-     "\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {280, "continue_stmt", 0, 2, states_24,
-     "\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {281, "return_stmt", 0, 3, states_25,
-     "\000\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {282, "yield_stmt", 0, 2, states_26,
-     "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\100"},
+     "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\200\000"},
     {283, "raise_stmt", 0, 5, states_27,
-     "\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {284, "import_stmt", 0, 2, states_28,
-     "\000\000\000\000\000\000\000\000\000\040\001\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\000\040\001\000\000\000\000\000\000\000\000\000\000\000\000"},
     {285, "import_name", 0, 3, states_29,
-     "\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000"},
     {286, "import_from", 0, 8, states_30,
-     "\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {287, "import_as_name", 0, 4, states_31,
-     "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {288, "dotted_as_name", 0, 4, states_32,
-     "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {289, "import_as_names", 0, 3, states_33,
-     "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {290, "dotted_as_names", 0, 2, states_34,
-     "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {291, "dotted_name", 0, 2, states_35,
-     "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {292, "global_stmt", 0, 3, states_36,
-     "\000\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000"},
     {293, "nonlocal_stmt", 0, 3, states_37,
-     "\000\000\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000"},
     {294, "assert_stmt", 0, 5, states_38,
-     "\000\000\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000"},
     {295, "compound_stmt", 0, 2, states_39,
-     "\000\010\140\000\000\000\000\000\000\000\000\000\262\004\000\000\000\000\000\000\000\002"},
+     "\000\010\140\000\000\000\000\000\000\000\000\000\262\004\000\000\000\000\000\000\000\002\000"},
     {296, "async_stmt", 0, 3, states_40,
-     "\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {297, "if_stmt", 0, 8, states_41,
-     "\000\000\000\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000"},
     {298, "while_stmt", 0, 8, states_42,
-     "\000\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000"},
     {299, "for_stmt", 0, 10, states_43,
-     "\000\000\000\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000"},
     {300, "try_stmt", 0, 13, states_44,
-     "\000\000\000\000\000\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\000\000"},
     {301, "with_stmt", 0, 5, states_45,
-     "\000\000\000\000\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000"},
     {302, "with_item", 0, 4, states_46,
-     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
+     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
     {303, "except_clause", 0, 5, states_47,
-     "\000\000\000\000\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000"},
     {304, "suite", 0, 5, states_48,
-     "\004\040\200\000\002\000\000\000\012\076\011\007\000\000\020\002\000\300\220\050\037\100"},
+     "\004\040\200\000\002\000\000\000\012\076\011\007\000\000\020\002\000\300\220\050\037\200\000"},
     {305, "test", 0, 6, states_49,
-     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
+     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
     {306, "test_nocond", 0, 2, states_50,
-     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
+     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
     {307, "lambdef", 0, 5, states_51,
-     "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000"},
     {308, "lambdef_nocond", 0, 5, states_52,
-     "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000"},
     {309, "or_test", 0, 2, states_53,
-     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\002\000\300\220\050\037\000"},
+     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\002\000\300\220\050\037\000\000"},
     {310, "and_test", 0, 2, states_54,
-     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\002\000\300\220\050\037\000"},
+     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\002\000\300\220\050\037\000\000"},
     {311, "not_test", 0, 3, states_55,
-     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\002\000\300\220\050\037\000"},
+     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\002\000\300\220\050\037\000\000"},
     {312, "comparison", 0, 2, states_56,
-     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000"},
+     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000\000"},
     {313, "comp_op", 0, 4, states_57,
-     "\000\000\000\000\000\000\000\000\000\000\000\000\100\000\000\362\017\000\000\000\000\000"},
+     "\000\000\000\000\000\000\000\000\000\000\000\000\100\000\000\362\017\000\000\000\000\000\000"},
     {314, "star_expr", 0, 3, states_58,
-     "\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {315, "expr", 0, 2, states_59,
-     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000"},
+     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000\000"},
     {316, "xor_expr", 0, 2, states_60,
-     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000"},
+     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000\000"},
     {317, "and_expr", 0, 2, states_61,
-     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000"},
+     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000\000"},
     {318, "shift_expr", 0, 2, states_62,
-     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000"},
+     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000\000"},
     {319, "arith_expr", 0, 2, states_63,
-     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000"},
+     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000\000"},
     {320, "term", 0, 2, states_64,
-     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000"},
+     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000\000"},
     {321, "factor", 0, 3, states_65,
-     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000"},
+     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000\000"},
     {322, "power", 0, 4, states_66,
-     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\200\050\037\000"},
+     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\200\050\037\000\000"},
     {323, "atom_expr", 0, 3, states_67,
-     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\200\050\037\000"},
+     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\200\050\037\000\000"},
     {324, "atom", 0, 9, states_68,
-     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\050\037\000"},
+     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\050\037\000\000"},
     {325, "testlist_comp", 0, 5, states_69,
-     "\000\040\200\000\002\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
+     "\000\040\200\000\002\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
     {326, "trailer", 0, 7, states_70,
-     "\000\040\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\010\000\000"},
+     "\000\040\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\010\000\000\000"},
     {327, "subscriptlist", 0, 3, states_71,
-     "\000\040\200\010\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
+     "\000\040\200\010\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
     {328, "subscript", 0, 5, states_72,
-     "\000\040\200\010\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
+     "\000\040\200\010\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
     {329, "sliceop", 0, 3, states_73,
-     "\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+     "\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
     {330, "exprlist", 0, 3, states_74,
-     "\000\040\200\000\002\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000"},
+     "\000\040\200\000\002\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000\000"},
     {331, "testlist", 0, 3, states_75,
-     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
+     "\000\040\200\000\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
     {332, "dictorsetmaker", 0, 14, states_76,
-     "\000\040\200\000\006\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
+     "\000\040\200\000\006\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
     {333, "classdef", 0, 8, states_77,
-     "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\002"},
+     "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\002\000"},
     {334, "arglist", 0, 3, states_78,
-     "\000\040\200\000\006\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
+     "\000\040\200\000\006\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
     {335, "argument", 0, 4, states_79,
-     "\000\040\200\000\006\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
+     "\000\040\200\000\006\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
     {336, "comp_iter", 0, 2, states_80,
-     "\000\000\040\000\000\000\000\000\000\000\000\000\042\000\000\000\000\000\000\000\000\000"},
-    {337, "comp_for", 0, 7, states_81,
-     "\000\000\040\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000"},
-    {338, "comp_if", 0, 4, states_82,
-     "\000\000\000\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000"},
-    {339, "encoding_decl", 0, 2, states_83,
-     "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
-    {340, "yield_expr", 0, 3, states_84,
-     "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\100"},
-    {341, "yield_arg", 0, 3, states_85,
-     "\000\040\200\000\000\000\000\000\000\040\010\000\000\000\020\002\000\300\220\050\037\000"},
-};
-static label labels[176] = {
+     "\000\000\040\000\000\000\000\000\000\000\000\000\042\000\000\000\000\000\000\000\000\000\000"},
+    {337, "sync_comp_for", 0, 6, states_81,
+     "\000\000\000\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000"},
+    {338, "comp_for", 0, 3, states_82,
+     "\000\000\040\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000"},
+    {339, "comp_if", 0, 4, states_83,
+     "\000\000\000\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000"},
+    {340, "encoding_decl", 0, 2, states_84,
+     "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+    {341, "yield_expr", 0, 3, states_85,
+     "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\200\000"},
+    {342, "yield_arg", 0, 3, states_86,
+     "\000\040\200\000\000\000\000\000\000\040\010\000\000\000\020\002\000\300\220\050\037\000\000"},
+};
+static label labels[177] = {
     {0, "EMPTY"},
     {256, 0},
     {4, 0},
@@ -2099,7 +2111,7 @@ static label labels[176] = {
     {333, 0},
     {263, 0},
     {262, 0},
-    {55, 0},
+    {1, "async"},
     {1, "def"},
     {1, 0},
     {264, 0},
@@ -2128,7 +2140,7 @@ static label labels[176] = {
     {274, 0},
     {273, 0},
     {275, 0},
-    {340, 0},
+    {341, 0},
     {314, 0},
     {36, 0},
     {37, 0},
@@ -2229,7 +2241,7 @@ static label labels[176] = {
     {31, 0},
     {322, 0},
     {323, 0},
-    {54, 0},
+    {1, "await"},
     {324, 0},
     {326, 0},
     {325, 0},
@@ -2243,21 +2255,22 @@ static label labels[176] = {
     {1, "None"},
     {1, "True"},
     {1, "False"},
-    {337, 0},
+    {338, 0},
     {327, 0},
     {328, 0},
     {329, 0},
     {1, "class"},
     {335, 0},
     {336, 0},
-    {338, 0},
     {339, 0},
+    {337, 0},
+    {340, 0},
     {1, "yield"},
-    {341, 0},
+    {342, 0},
 };
 grammar _PyParser_Grammar = {
-    86,
+    87,
     dfas,
-    {176, labels},
+    {177, labels},
     256
 };
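
A note on the two label changes above: the old entries {55, 0} and
{54, 0} referenced the dedicated ASYNC and AWAIT token types, while the
new entries {1, "async"} and {1, "await"} are NAME tokens pinned to a
literal string, which is exactly how every other keyword in this table
is encoded (compare {1, "def"} and {1, "class"}). A minimal sketch of
the user-visible effect on 3.7 (illustrative only, not part of the
patch):

    import keyword

    # Both words now register as proper keywords...
    assert keyword.iskeyword('async') and keyword.iskeyword('await')

    # ...so using either one as an identifier is a compile-time error.
    try:
        compile('async = 1', '<test>', 'exec')
    except SyntaxError as exc:
        print('rejected:', exc)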

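Why every first-set string in the dfas table grew by exactly one byte:
the grammar gains one nonterminal (sync_comp_for, DFA 337) and with it
one label, taking the totals from 86 DFAs and 176 labels to 87 and 177,
and pgen sizes each DFA's accelerator bitset at one bit per label,
rounded up to whole bytes. Judging by the first sets above, the
comprehension rules were split so that comp_for reduces to an optional
'async' prefix followed by sync_comp_for, which carries the actual
'for' clause. A quick sketch of the byte-count arithmetic (the nbytes
helper is hypothetical, written here only to mirror what pgen computes):

    def nbytes(nlabels):
        # one bit per label, rounded up to a whole number of bytes
        return (nlabels + 7) // 8

    assert nbytes(176) == 22   # old label count -> 22-byte bitmap strings
    assert nbytes(177) == 23   # new label count -> 23-byte bitmap strings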

